VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65879

Last change on this file since 65879 was 65879, checked in by vboxsync, 8 years ago

IEM,DIS: Updates

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 373.1 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65879 2017-02-25 14:00:07Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/** @def og_gen General
25 * @{
26 */
27
28/** @def og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify of,sf,zf,af,pf,cf
 * @ophints harmless ignores_op_size
 * @opstats add_Eb_Gb
 * @opgroup op_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl=of,sf,zf,af
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - common byte-sized binary-operator helper does decode + execute. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 - fixed accumulator encoding, no ModR/M. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32 (sign-extended to 64-bit when REX.W). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
121
122
/**
 * @opcode 0x06
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; segment load deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
146
147
/**
 * @opcode 0x08
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8.  AF is architecturally undefined after OR (see @opflundef). */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/**
 * @opcode 0x09
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/**
 * @opcode 0x0a
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0b
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
236
237
/**
 * @opcode 0x0e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - 8086..286 only in practice; invalid in 64-bit mode (0x0e is
       the two-byte escape there and on 386+, see iemOp_2byteEscape for 0x0f). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
248
249
250/**
251 * @opcode 0x0f
252 * @opmnemonic EscTwo0f
253 * @openc two0f
254 * @opdisenum OP_2B_ESC
255 * @ophints harmless
256 * @opgroup op_escapes
257 */
258FNIEMOP_DEF(iemOp_2byteEscape)
259{
260#ifdef VBOX_STRICT
261 /* Sanity check the table the first time around. */
262 static bool s_fTested = false;
263 if (RT_LIKELY(s_fTested)) { /* likely */ }
264 else
265 {
266 s_fTested = true;
267 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
268 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
269 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
270 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
271 }
272#endif
273
274 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
275 {
276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
277 IEMOP_HLP_MIN_286();
278 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
279 }
280 /* @opdone */
281
282 /*
283 * On the 8086 this is a POP CS instruction.
284 * For the time being we don't specify this this.
285 */
286 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
287 IEMOP_HLP_NO_64BIT();
288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
289 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
290}
291
/**
 * @opcode 0x10
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=of,sf,zf,af
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=of,sf,zf,af
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry-in (CF is an input, see @opfltest). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
370
371
/**
 * @opcode 0x16
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
381
382
/**
 * @opcode 0x17
 * @opgroup op_stack_sreg
 *
 * NOTE(review): the previous doxygen tags here (@opgroup op_gen_arith_bin,
 * @opfltest cf, @opflmodify ...) looked copy-pasted from the ADC block above;
 * POP SS is a stack/segment-register op like 0x07/0x1f and does not touch the
 * arithmetic flags.
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; DISOPTYPE_INHIBIT_IRQS flags the
       interrupt shadow that follows a load of SS. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
396
397
/**
 * @opcode 0x18
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow-in (CF is an input, see @opfltest). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
474
475
/**
 * @opcode 0x1e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; segment load deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
499
500
/**
 * @opcode 0x20
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8.  AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x24
 */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    /* AND AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/**
 * @opcode 0x25
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32. */
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
565
566
/**
 * @opcode 0x26
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix bit and the effective
       segment, then recursively decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
579
580
/**
 * @opcode 0x27
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined after DAA.  The actual adjustment is
       done in the C implementation. */
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
592
593
/**
 * @opcode 0x28
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8. */
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32. */
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
652
653
/**
 * @opcode 0x2e
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix bit and the effective
       segment, then recursively decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
666
667
/**
 * @opcode 0x2f
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined after DAS.  The actual adjustment is
       done in the C implementation. */
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
679
680
/**
 * @opcode 0x30
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8.  AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x34
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/**
 * @opcode 0x35
 */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    /* XOR rAX, imm16/32. */
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
745
746
/**
 * @opcode 0x36
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix bit and the effective
       segment, then recursively decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
759
760
/**
 * @opcode 0x37
 */
/* AAA (ASCII adjust AL after addition) - not implemented yet; FNIEMOP_STUB
   presumably generates a placeholder decoder. TODO confirm stub semantics. */
FNIEMOP_STUB(iemOp_aaa);
765
766
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - flags-only subtract; the helper's cmp worker discards the result. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - register first-operand form. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - register first-operand form. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - fixed accumulator encoding. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
825
826
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix bit and the effective
       segment, then recursively decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
839
840
/**
 * @opcode 0x3f
 */
/* AAS (ASCII adjust AL after subtraction) - not implemented yet; FNIEMOP_STUB
   presumably generates a placeholder decoder. TODO confirm stub semantics. */
FNIEMOP_STUB(iemOp_aas);
845
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits the microcode for a unary operation on general-purpose register
 * @a iReg, dispatching on the current effective operand size.
 *
 * @param   pImpl   Table of 16/32/64-bit assembly workers for the operation
 *                  (e.g. &g_iemAImpl_inc).
 * @param   iReg    The general register index (X86_GREG_xXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes clear the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable for valid enmEffOpSize values; keeps compilers quiet
       about falling off the switch. */
    return VINF_SUCCESS;
}
890
891
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode (plain REX, no extension bits set);
     * otherwise INC eAX.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX.B prefix in 64-bit mode; otherwise INC eCX.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;    /* extends r/m, base and opcode-reg fields */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX.X prefix in 64-bit mode; otherwise INC eDX.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* extends the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX.BX prefix in 64-bit mode; otherwise INC eBX.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX.R prefix in 64-bit mode; otherwise INC eSP.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;    /* extends the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX.RB prefix in 64-bit mode; otherwise INC eBP.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX.RX prefix in 64-bit mode; otherwise INC eSI.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX.RBX prefix in 64-bit mode; otherwise INC eDI.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1079
1080
1081/**
1082 * @opcode 0x48
1083 */
1084FNIEMOP_DEF(iemOp_dec_eAX)
1085{
1086 /*
1087 * This is a REX prefix in 64-bit mode.
1088 */
1089 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1090 {
1091 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1092 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1093 iemRecalEffOpSize(pVCpu);
1094
1095 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1096 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1097 }
1098
1099 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1100 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1101}
1102
1103
1104/**
1105 * @opcode 0x49
1106 */
1107FNIEMOP_DEF(iemOp_dec_eCX)
1108{
1109 /*
1110 * This is a REX prefix in 64-bit mode.
1111 */
1112 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1113 {
1114 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1116 pVCpu->iem.s.uRexB = 1 << 3;
1117 iemRecalEffOpSize(pVCpu);
1118
1119 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1120 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1121 }
1122
1123 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1124 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1125}
1126
1127
1128/**
1129 * @opcode 0x4a
1130 */
1131FNIEMOP_DEF(iemOp_dec_eDX)
1132{
1133 /*
1134 * This is a REX prefix in 64-bit mode.
1135 */
1136 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1137 {
1138 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1139 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1140 pVCpu->iem.s.uRexIndex = 1 << 3;
1141 iemRecalEffOpSize(pVCpu);
1142
1143 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1144 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1145 }
1146
1147 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1148 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1149}
1150
1151
1152/**
1153 * @opcode 0x4b
1154 */
1155FNIEMOP_DEF(iemOp_dec_eBX)
1156{
1157 /*
1158 * This is a REX prefix in 64-bit mode.
1159 */
1160 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1161 {
1162 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1163 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1164 pVCpu->iem.s.uRexB = 1 << 3;
1165 pVCpu->iem.s.uRexIndex = 1 << 3;
1166 iemRecalEffOpSize(pVCpu);
1167
1168 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1169 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1170 }
1171
1172 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1173 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1174}
1175
1176
1177/**
1178 * @opcode 0x4c
1179 */
1180FNIEMOP_DEF(iemOp_dec_eSP)
1181{
1182 /*
1183 * This is a REX prefix in 64-bit mode.
1184 */
1185 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1186 {
1187 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1188 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1189 pVCpu->iem.s.uRexReg = 1 << 3;
1190 iemRecalEffOpSize(pVCpu);
1191
1192 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1193 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1194 }
1195
1196 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1197 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1198}
1199
1200
1201/**
1202 * @opcode 0x4d
1203 */
1204FNIEMOP_DEF(iemOp_dec_eBP)
1205{
1206 /*
1207 * This is a REX prefix in 64-bit mode.
1208 */
1209 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1210 {
1211 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1212 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1213 pVCpu->iem.s.uRexReg = 1 << 3;
1214 pVCpu->iem.s.uRexB = 1 << 3;
1215 iemRecalEffOpSize(pVCpu);
1216
1217 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1218 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1219 }
1220
1221 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1222 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1223}
1224
1225
/**
 * @opcode 0x4e
 * REX.WRX prefix in 64-bit mode; 'dec eSI' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModR/M reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Restart decoding with the next byte as the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1249
1250
/**
 * @opcode 0x4f
 * REX.WRXB prefix (all extension bits set) in 64-bit mode; 'dec eDI' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the ModR/M r/m (or base/opcode-reg) field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Restart decoding with the next byte as the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1275
1276
/**
 * Common 'push register' helper.
 *
 * Emits the micro-code for pushing general register @a iReg at the current
 * effective operand size.  In 64-bit mode the register index is extended with
 * REX.B and the default operand size is forced to 64-bit (PUSH cannot take a
 * 32-bit operand there; 0x66 selects 16-bit instead).
 *
 * @param   iReg    The register index (X86_GREG_XXX), before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1322
1323
/**
 * @opcode 0x50
 * PUSH rAX/eAX/AX.
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1332
1333
/**
 * @opcode 0x51
 * PUSH rCX/eCX/CX.
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1342
1343
/**
 * @opcode 0x52
 * PUSH rDX/eDX/DX.
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1352
1353
/**
 * @opcode 0x53
 * PUSH rBX/eBX/BX.
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1362
1363
/**
 * @opcode 0x54
 * PUSH rSP/eSP/SP.
 *
 * On the 8086 target, 'push sp' pushes the value of SP *after* it has been
 * decremented (hence the SUB by 2 before the push); 186 and later push the
 * pre-decrement value, which the common helper provides.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 pushes the decremented SP. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1382
1383
/**
 * @opcode 0x55
 * PUSH rBP/eBP/BP.
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1392
1393
/**
 * @opcode 0x56
 * PUSH rSI/eSI/SI.
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1402
1403
/**
 * @opcode 0x57
 * PUSH rDI/eDI/DI.
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1412
1413
/**
 * Common 'pop register' helper.
 *
 * Emits the micro-code for popping into general register @a iReg at the
 * current effective operand size.  In 64-bit mode the register index is
 * extended with REX.B and the default operand size is forced to 64-bit
 * (0x66 selects 16-bit instead).
 *
 * Note: the pop is done through a register reference, so this must NOT be
 * used for rSP itself (see iemOp_pop_eSP for the special handling).
 *
 * @param   iReg    The register index (X86_GREG_XXX), before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1460
1461
/**
 * @opcode 0x58
 * POP rAX/eAX/AX.
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1470
1471
/**
 * @opcode 0x59
 * POP rCX/eCX/CX.
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1480
1481
/**
 * @opcode 0x5a
 * POP rDX/eDX/DX.
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1490
1491
/**
 * @opcode 0x5b
 * POP rBX/eBX/BX.
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1500
1501
/**
 * @opcode 0x5c
 * POP rSP/eSP/SP.
 *
 * Popping into the stack pointer itself needs special handling: the value is
 * popped into a local first and only then stored to rSP, so the pop's own
 * stack-pointer update does not clobber (or get clobbered by) the destination.
 * With REX.B the destination is r12 instead, and the common helper applies.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
/**
 * @opcode 0x5d
 * POP rBP/eBP/BP.
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1561
1562
/**
 * @opcode 0x5e
 * POP rSI/eSI/SI.
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1571
1572
/**
 * @opcode 0x5f
 * POP rDI/eDI/DI.
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Size handling and REX.B extension are done by the common helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1581
1582
/**
 * @opcode 0x60
 * PUSHA/PUSHAD - push all general registers.  186+, invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to a C implementation; only the operand size is decided here. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1596
1597
/**
 * @opcode 0x61
 * POPA/POPAD outside 64-bit mode; in 64-bit mode the byte is the MVEX prefix
 * (Knights Corner), which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Defer to a C implementation; only the operand size is decided here. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1617
1618
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless invalid_64
 */
/* Not implemented yet: stub raises a 'not implemented' status.  In 64-bit
   mode this byte is the EVEX prefix, also not handled here. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186();
1629
1630
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector.  286+,
 * invalid in real and V86 mode.  Modifies ZF via the assembly helper.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write and commit after the call. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1680
1681
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit register.
 * 64-bit mode only (the caller dispatches to ARPL otherwise).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1725
1726
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup op_prefixes
 *
 * FS segment-override prefix: records the prefix flag, sets the effective
 * segment, and restarts decoding with the following byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Continue with the next byte as the opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1744
1745
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup op_prefixes
 *
 * GS segment-override prefix: records the prefix flag, sets the effective
 * segment, and restarts decoding with the following byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Continue with the next byte as the opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1763
1764
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup op_prefixes
 *
 * Operand-size override prefix: flips the effective operand size and
 * restarts decoding with the following byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue with the next byte as the opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1789
1790
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup op_prefixes
 *
 * Address-size override prefix: toggles the effective addressing mode
 * relative to the default (16<->32; 64 -> 32) and restarts decoding with
 * the following byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue with the next byte as the opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1816
1817
/**
 * @opcode 0x68
 * PUSH Iz - push an immediate (word/dword; sign-extended dword in 64-bit
 * mode).  186+.  Default operand size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is a dword, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1864
1865
/**
 * @opcode 0x69
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * 186+.  SF/ZF/AF/PF are undefined after the operation; the assembly helper
 * sets the flags it defines.
 *
 * Each size variant multiplies the Ev value into a local temporary via the
 * iemAImpl_imul_two_uXX helper and then stores the temporary to Gv.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - note the immediate size (2) passed to the
                   effective-address calculation for RIP-relative addressing. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - immediate size is 4 bytes here. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - the immediate is a sign-extended dword. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - dword immediate, sign-extended to 64 bits. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2027
2028
/**
 * @opcode 0x6a
 * PUSH Ib - push a sign-extended byte immediate.  186+.  Default operand
 * size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* The signed byte is implicitly sign-extended to the push width. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2057
2058
/**
 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  186+.  SF/ZF/AF/PF are undefined after the operation.
 *
 * Structure mirrors iemOp_imul_Gv_Ev_Iz, with the immediate sign-extended
 * from a byte instead of being full operand size.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - immediate is 1 byte for EA calculation. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - immediate is 1 byte for EA calculation. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - immediate is 1 byte for EA calculation. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2214
2215
/**
 * @opcode 0x6c
 * INS Yb,DX (byte) - input from the DX port to ES:[rDI].  186+.
 * Dispatches to a C implementation per addressing mode; REP/REPNE prefixes
 * select the repeating variants.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2246
2247
/**
 * @opcode 0x6d
 * INS Yv,DX (word/dword) - input from the DX port to ES:[rDI].  186+.
 * Dispatches to a C implementation per operand and addressing mode; a
 * 64-bit operand size falls back to the 32-bit worker (no 64-bit INS).
 * REP/REPNE prefixes select the repeating variants.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2310
2311
/**
 * @opcode 0x6e
 * OUTS DX,Yb (byte) - output from [iEffSeg:rSI] to the DX port.  186+.
 * Dispatches to a C implementation per addressing mode, passing the
 * effective source segment; REP/REPNE prefixes select the repeating
 * variants.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2342
2343
/**
 * @opcode 0x6f
 *
 * OUTSW/OUTSD - write a word/dword from the string at [seg:e/rSI] to the I/O
 * port in DX.  Decoding only: defers to C implementation workers selected by
 * effective operand size and address mode.  A 64-bit operand size falls back
 * to the 32-bit workers (there is no 64-bit OUTS operand).
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();                        /* OUTS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ (F3) and REPNZ (F2) are treated identically for string I/O. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2406
2407
/**
 * @opcode 0x70
 *
 * JO rel8 - short conditional jump, taken when OF is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: jump relative to the next instruction */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2427
2428
/**
 * @opcode 0x71
 *
 * JNO rel8 - short conditional jump, taken when OF is clear.  Same test as
 * JO (0x70) with the two branch bodies swapped.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* OF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* OF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2448
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE rel8 - short conditional jump, taken when CF is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2468
2469
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE rel8 - short conditional jump, taken when CF is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();           /* CF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* CF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2489
2490
/**
 * @opcode 0x74
 *
 * JE/JZ rel8 - short conditional jump, taken when ZF is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2510
2511
/**
 * @opcode 0x75
 *
 * JNE/JNZ rel8 - short conditional jump, taken when ZF is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2531
2532
/**
 * @opcode 0x76
 *
 * JBE/JNA rel8 - short conditional jump, taken when CF or ZF is set
 * (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2552
2553
/**
 * @opcode 0x77
 *
 * JA/JNBE rel8 - short conditional jump, taken when both CF and ZF are clear
 * (unsigned above).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2573
2574
/**
 * @opcode 0x78
 *
 * JS rel8 - short conditional jump, taken when SF is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2594
2595
/**
 * @opcode 0x79
 *
 * JNS rel8 - short conditional jump, taken when SF is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();           /* SF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2615
2616
/**
 * @opcode 0x7a
 *
 * JP/JPE rel8 - short conditional jump, taken when PF is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2636
2637
/**
 * @opcode 0x7b
 *
 * JNP/JPO rel8 - short conditional jump, taken when PF is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();           /* PF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2657
2658
/**
 * @opcode 0x7c
 *
 * JL/JNGE rel8 - short conditional jump, taken when SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2678
2679
/**
 * @opcode 0x7d
 *
 * JGE/JNL rel8 - short conditional jump, taken when SF == OF (signed
 * greater-or-equal).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2699
2700
/**
 * @opcode 0x7e
 *
 * JLE/JNG rel8 - short conditional jump, taken when ZF is set or SF != OF
 * (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2720
2721
/**
 * @opcode 0x7f
 *
 * JG/JNLE rel8 - short conditional jump, taken when ZF is clear and SF == OF
 * (signed greater).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* ZF set or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2741
2742
/**
 * @opcode 0x80
 *
 * Group 1 byte operations with an 8-bit immediate: the ModR/M reg field
 * selects ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on Eb,Ib.  The arithmetic worker is
 * looked up in g_apIemImplGrp1 by that reg field; CMP (which has no locked
 * variant) maps its memory operand read-only.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register the per-sub-opcode mnemonic for stats/logging. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK is invalid on a register destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* Only CMP lacks a locked worker; it maps the operand read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first, then the trailing immediate byte. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();              /* LOCK prefix allowed */
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2814
2815
2816/**
2817 * @opcode 0x81
2818 */
2819FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2820{
2821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2822 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2823 {
2824 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2825 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2826 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2827 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2828 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2829 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2830 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2831 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2832 }
2833 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2834
2835 switch (pVCpu->iem.s.enmEffOpSize)
2836 {
2837 case IEMMODE_16BIT:
2838 {
2839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2840 {
2841 /* register target */
2842 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2844 IEM_MC_BEGIN(3, 0);
2845 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2846 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
2847 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2848
2849 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2850 IEM_MC_REF_EFLAGS(pEFlags);
2851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2852
2853 IEM_MC_ADVANCE_RIP();
2854 IEM_MC_END();
2855 }
2856 else
2857 {
2858 /* memory target */
2859 uint32_t fAccess;
2860 if (pImpl->pfnLockedU16)
2861 fAccess = IEM_ACCESS_DATA_RW;
2862 else /* CMP, TEST */
2863 fAccess = IEM_ACCESS_DATA_R;
2864 IEM_MC_BEGIN(3, 2);
2865 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2866 IEM_MC_ARG(uint16_t, u16Src, 1);
2867 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2869
2870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2871 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2872 IEM_MC_ASSIGN(u16Src, u16Imm);
2873 if (pImpl->pfnLockedU16)
2874 IEMOP_HLP_DONE_DECODING();
2875 else
2876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2877 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2878 IEM_MC_FETCH_EFLAGS(EFlags);
2879 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2881 else
2882 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2883
2884 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
2885 IEM_MC_COMMIT_EFLAGS(EFlags);
2886 IEM_MC_ADVANCE_RIP();
2887 IEM_MC_END();
2888 }
2889 break;
2890 }
2891
2892 case IEMMODE_32BIT:
2893 {
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /* register target */
2897 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2899 IEM_MC_BEGIN(3, 0);
2900 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2901 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
2902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2903
2904 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2905 IEM_MC_REF_EFLAGS(pEFlags);
2906 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2907 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
2908
2909 IEM_MC_ADVANCE_RIP();
2910 IEM_MC_END();
2911 }
2912 else
2913 {
2914 /* memory target */
2915 uint32_t fAccess;
2916 if (pImpl->pfnLockedU32)
2917 fAccess = IEM_ACCESS_DATA_RW;
2918 else /* CMP, TEST */
2919 fAccess = IEM_ACCESS_DATA_R;
2920 IEM_MC_BEGIN(3, 2);
2921 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2922 IEM_MC_ARG(uint32_t, u32Src, 1);
2923 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2925
2926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2927 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2928 IEM_MC_ASSIGN(u32Src, u32Imm);
2929 if (pImpl->pfnLockedU32)
2930 IEMOP_HLP_DONE_DECODING();
2931 else
2932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2933 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2934 IEM_MC_FETCH_EFLAGS(EFlags);
2935 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2937 else
2938 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
2939
2940 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
2941 IEM_MC_COMMIT_EFLAGS(EFlags);
2942 IEM_MC_ADVANCE_RIP();
2943 IEM_MC_END();
2944 }
2945 break;
2946 }
2947
2948 case IEMMODE_64BIT:
2949 {
2950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2951 {
2952 /* register target */
2953 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2955 IEM_MC_BEGIN(3, 0);
2956 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2957 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
2958 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2959
2960 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2961 IEM_MC_REF_EFLAGS(pEFlags);
2962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2963
2964 IEM_MC_ADVANCE_RIP();
2965 IEM_MC_END();
2966 }
2967 else
2968 {
2969 /* memory target */
2970 uint32_t fAccess;
2971 if (pImpl->pfnLockedU64)
2972 fAccess = IEM_ACCESS_DATA_RW;
2973 else /* CMP */
2974 fAccess = IEM_ACCESS_DATA_R;
2975 IEM_MC_BEGIN(3, 2);
2976 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2977 IEM_MC_ARG(uint64_t, u64Src, 1);
2978 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2980
2981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2982 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2983 if (pImpl->pfnLockedU64)
2984 IEMOP_HLP_DONE_DECODING();
2985 else
2986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2987 IEM_MC_ASSIGN(u64Src, u64Imm);
2988 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2989 IEM_MC_FETCH_EFLAGS(EFlags);
2990 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2992 else
2993 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
2994
2995 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
2996 IEM_MC_COMMIT_EFLAGS(EFlags);
2997 IEM_MC_ADVANCE_RIP();
2998 IEM_MC_END();
2999 }
3000 break;
3001 }
3002 }
3003 return VINF_SUCCESS;
3004}
3005
3006
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup op_groups
 *
 * Alias of 0x80 (group 1 Eb,Ib); this encoding is only valid outside
 * 64-bit mode, hence the IEMOP_HLP_NO_64BIT guard before delegating.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3017
3018
/**
 * @opcode 0x83
 *
 * Group 1 word/dword/qword operations with a sign-extended 8-bit immediate:
 * the ModR/M reg field selects ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on Ev,Ib.
 * The immediate byte is sign-extended to the effective operand size.  The
 * arithmetic worker is looked up in g_apIemImplGrp1 by the reg field; CMP
 * (which has no locked variant) maps its memory operand read-only.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register the per-sub-opcode mnemonic for stats/logging. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK is invalid on a register destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half. */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Only CMP lacks a locked worker; it maps the operand read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first, then the trailing immediate byte. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();          /* LOCK prefix allowed */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first, then the trailing immediate byte. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();          /* LOCK prefix allowed */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first, then the trailing immediate byte. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();          /* LOCK prefix allowed */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3203
3204
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - delegates to the generic rm,r8 binary-operator helper with the
 * TEST worker table.  AF is declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3214
3215
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - delegates to the generic rm,rv binary-operator helper with the
 * TEST worker table.  AF is declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3225
3226
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - swap a byte register with another byte register or with
 * memory.  The memory form goes through an assembly xchg worker on the
 * mapped operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK is invalid for the register form. */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Classic two-temporary swap of reg and r/m. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * NOTE(review): no IEMOP_HLP_DONE_DECODING*() call on this path, so a
         * LOCK prefix is implicitly accepted here - verify this is intentional
         * (XCHG with memory is architecturally locked regardless of the prefix).
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3276
3277
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - swap a word/dword/qword register with another register or
 * with memory, dispatched on the effective operand size.  The memory forms
 * go through assembly xchg workers on the mapped operand.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK is invalid for the register form. */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Classic two-temporary swap of reg and r/m. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                /* 32-bit GREG stores zero the upper halves implicitly. */
                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * NOTE(review): no IEMOP_HLP_DONE_DECODING*() call on these paths, so a
         * LOCK prefix is implicitly accepted here - verify this is intentional
         * (XCHG with memory is architecturally locked regardless of the prefix).
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit register write zeroes the upper half. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3401
3402
3403/**
3404 * @opcode 0x88
3405 */
3406FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3407{
3408 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3409
3410 uint8_t bRm;
3411 IEM_OPCODE_GET_NEXT_U8(&bRm);
3412
3413 /*
3414 * If rm is denoting a register, no more instruction bytes.
3415 */
3416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3417 {
3418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3419 IEM_MC_BEGIN(0, 1);
3420 IEM_MC_LOCAL(uint8_t, u8Value);
3421 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3422 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3423 IEM_MC_ADVANCE_RIP();
3424 IEM_MC_END();
3425 }
3426 else
3427 {
3428 /*
3429 * We're writing a register to memory.
3430 */
3431 IEM_MC_BEGIN(0, 2);
3432 IEM_MC_LOCAL(uint8_t, u8Value);
3433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3436 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 return VINF_SUCCESS;
3442
3443}
3444
3445
3446/**
3447 * @opcode 0x89
3448 */
3449FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3450{
3451 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3452
3453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3454
3455 /*
3456 * If rm is denoting a register, no more instruction bytes.
3457 */
3458 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3459 {
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 switch (pVCpu->iem.s.enmEffOpSize)
3462 {
3463 case IEMMODE_16BIT:
3464 IEM_MC_BEGIN(0, 1);
3465 IEM_MC_LOCAL(uint16_t, u16Value);
3466 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3467 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3468 IEM_MC_ADVANCE_RIP();
3469 IEM_MC_END();
3470 break;
3471
3472 case IEMMODE_32BIT:
3473 IEM_MC_BEGIN(0, 1);
3474 IEM_MC_LOCAL(uint32_t, u32Value);
3475 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3476 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3477 IEM_MC_ADVANCE_RIP();
3478 IEM_MC_END();
3479 break;
3480
3481 case IEMMODE_64BIT:
3482 IEM_MC_BEGIN(0, 1);
3483 IEM_MC_LOCAL(uint64_t, u64Value);
3484 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3485 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3486 IEM_MC_ADVANCE_RIP();
3487 IEM_MC_END();
3488 break;
3489 }
3490 }
3491 else
3492 {
3493 /*
3494 * We're writing a register to memory.
3495 */
3496 switch (pVCpu->iem.s.enmEffOpSize)
3497 {
3498 case IEMMODE_16BIT:
3499 IEM_MC_BEGIN(0, 2);
3500 IEM_MC_LOCAL(uint16_t, u16Value);
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3504 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3505 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3506 IEM_MC_ADVANCE_RIP();
3507 IEM_MC_END();
3508 break;
3509
3510 case IEMMODE_32BIT:
3511 IEM_MC_BEGIN(0, 2);
3512 IEM_MC_LOCAL(uint32_t, u32Value);
3513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3516 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3517 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3518 IEM_MC_ADVANCE_RIP();
3519 IEM_MC_END();
3520 break;
3521
3522 case IEMMODE_64BIT:
3523 IEM_MC_BEGIN(0, 2);
3524 IEM_MC_LOCAL(uint64_t, u64Value);
3525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3528 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3529 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3530 IEM_MC_ADVANCE_RIP();
3531 IEM_MC_END();
3532 break;
3533 }
3534 }
3535 return VINF_SUCCESS;
3536}
3537
3538
3539/**
3540 * @opcode 0x8a
3541 */
3542FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3543{
3544 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3545
3546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3547
3548 /*
3549 * If rm is denoting a register, no more instruction bytes.
3550 */
3551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3552 {
3553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3554 IEM_MC_BEGIN(0, 1);
3555 IEM_MC_LOCAL(uint8_t, u8Value);
3556 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3557 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3558 IEM_MC_ADVANCE_RIP();
3559 IEM_MC_END();
3560 }
3561 else
3562 {
3563 /*
3564 * We're loading a register from memory.
3565 */
3566 IEM_MC_BEGIN(0, 2);
3567 IEM_MC_LOCAL(uint8_t, u8Value);
3568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3571 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3572 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3573 IEM_MC_ADVANCE_RIP();
3574 IEM_MC_END();
3575 }
3576 return VINF_SUCCESS;
3577}
3578
3579
3580/**
3581 * @opcode 0x8b
3582 */
3583FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3584{
3585 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3586
3587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3588
3589 /*
3590 * If rm is denoting a register, no more instruction bytes.
3591 */
3592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3593 {
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3595 switch (pVCpu->iem.s.enmEffOpSize)
3596 {
3597 case IEMMODE_16BIT:
3598 IEM_MC_BEGIN(0, 1);
3599 IEM_MC_LOCAL(uint16_t, u16Value);
3600 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3601 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3602 IEM_MC_ADVANCE_RIP();
3603 IEM_MC_END();
3604 break;
3605
3606 case IEMMODE_32BIT:
3607 IEM_MC_BEGIN(0, 1);
3608 IEM_MC_LOCAL(uint32_t, u32Value);
3609 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3610 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3611 IEM_MC_ADVANCE_RIP();
3612 IEM_MC_END();
3613 break;
3614
3615 case IEMMODE_64BIT:
3616 IEM_MC_BEGIN(0, 1);
3617 IEM_MC_LOCAL(uint64_t, u64Value);
3618 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3619 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3620 IEM_MC_ADVANCE_RIP();
3621 IEM_MC_END();
3622 break;
3623 }
3624 }
3625 else
3626 {
3627 /*
3628 * We're loading a register from memory.
3629 */
3630 switch (pVCpu->iem.s.enmEffOpSize)
3631 {
3632 case IEMMODE_16BIT:
3633 IEM_MC_BEGIN(0, 2);
3634 IEM_MC_LOCAL(uint16_t, u16Value);
3635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3639 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3640 IEM_MC_ADVANCE_RIP();
3641 IEM_MC_END();
3642 break;
3643
3644 case IEMMODE_32BIT:
3645 IEM_MC_BEGIN(0, 2);
3646 IEM_MC_LOCAL(uint32_t, u32Value);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3651 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 break;
3655
3656 case IEMMODE_64BIT:
3657 IEM_MC_BEGIN(0, 2);
3658 IEM_MC_LOCAL(uint64_t, u64Value);
3659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3662 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3663 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3664 IEM_MC_ADVANCE_RIP();
3665 IEM_MC_END();
3666 break;
3667 }
3668 }
3669 return VINF_SUCCESS;
3670}
3671
3672
3673/**
3674 * opcode 0x63
3675 * @todo Table fixme
3676 */
3677FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3678{
3679 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3680 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3681 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3682 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3683 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3684}
3685
3686
3687/**
3688 * @opcode 0x8c
3689 */
3690FNIEMOP_DEF(iemOp_mov_Ev_Sw)
3691{
3692 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
3693
3694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3695
3696 /*
3697 * Check that the destination register exists. The REX.R prefix is ignored.
3698 */
3699 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3700 if ( iSegReg > X86_SREG_GS)
3701 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3702
3703 /*
3704 * If rm is denoting a register, no more instruction bytes.
3705 * In that case, the operand size is respected and the upper bits are
3706 * cleared (starting with some pentium).
3707 */
3708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3709 {
3710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3711 switch (pVCpu->iem.s.enmEffOpSize)
3712 {
3713 case IEMMODE_16BIT:
3714 IEM_MC_BEGIN(0, 1);
3715 IEM_MC_LOCAL(uint16_t, u16Value);
3716 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3717 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 break;
3721
3722 case IEMMODE_32BIT:
3723 IEM_MC_BEGIN(0, 1);
3724 IEM_MC_LOCAL(uint32_t, u32Value);
3725 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
3726 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3727 IEM_MC_ADVANCE_RIP();
3728 IEM_MC_END();
3729 break;
3730
3731 case IEMMODE_64BIT:
3732 IEM_MC_BEGIN(0, 1);
3733 IEM_MC_LOCAL(uint64_t, u64Value);
3734 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
3735 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 break;
3739 }
3740 }
3741 else
3742 {
3743 /*
3744 * We're saving the register to memory. The access is word sized
3745 * regardless of operand size prefixes.
3746 */
3747#if 0 /* not necessary */
3748 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3749#endif
3750 IEM_MC_BEGIN(0, 2);
3751 IEM_MC_LOCAL(uint16_t, u16Value);
3752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3756 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3757 IEM_MC_ADVANCE_RIP();
3758 IEM_MC_END();
3759 }
3760 return VINF_SUCCESS;
3761}
3762
3763
3764
3765
3766/**
3767 * @opcode 0x8d
3768 */
3769FNIEMOP_DEF(iemOp_lea_Gv_M)
3770{
3771 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
3772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3774 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
3775
3776 switch (pVCpu->iem.s.enmEffOpSize)
3777 {
3778 case IEMMODE_16BIT:
3779 IEM_MC_BEGIN(0, 2);
3780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3781 IEM_MC_LOCAL(uint16_t, u16Cast);
3782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
3785 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
3786 IEM_MC_ADVANCE_RIP();
3787 IEM_MC_END();
3788 return VINF_SUCCESS;
3789
3790 case IEMMODE_32BIT:
3791 IEM_MC_BEGIN(0, 2);
3792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3793 IEM_MC_LOCAL(uint32_t, u32Cast);
3794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3796 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
3797 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
3798 IEM_MC_ADVANCE_RIP();
3799 IEM_MC_END();
3800 return VINF_SUCCESS;
3801
3802 case IEMMODE_64BIT:
3803 IEM_MC_BEGIN(0, 1);
3804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
3808 IEM_MC_ADVANCE_RIP();
3809 IEM_MC_END();
3810 return VINF_SUCCESS;
3811 }
3812 AssertFailedReturn(VERR_IEM_IPE_7);
3813}
3814
3815
3816/**
3817 * @opcode 0x8e
3818 */
3819FNIEMOP_DEF(iemOp_mov_Sw_Ev)
3820{
3821 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
3822
3823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3824
3825 /*
3826 * The practical operand size is 16-bit.
3827 */
3828#if 0 /* not necessary */
3829 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3830#endif
3831
3832 /*
3833 * Check that the destination register exists and can be used with this
3834 * instruction. The REX.R prefix is ignored.
3835 */
3836 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3837 if ( iSegReg == X86_SREG_CS
3838 || iSegReg > X86_SREG_GS)
3839 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3840
3841 /*
3842 * If rm is denoting a register, no more instruction bytes.
3843 */
3844 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3845 {
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_BEGIN(2, 0);
3848 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3849 IEM_MC_ARG(uint16_t, u16Value, 1);
3850 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3851 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3852 IEM_MC_END();
3853 }
3854 else
3855 {
3856 /*
3857 * We're loading the register from memory. The access is word sized
3858 * regardless of operand size prefixes.
3859 */
3860 IEM_MC_BEGIN(2, 1);
3861 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3862 IEM_MC_ARG(uint16_t, u16Value, 1);
3863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3867 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
/** Opcode 0x8f /0.
 *
 * POP Ev.  The register form shares code with the other register pops; the
 * memory form is implemented interpreter-style below because Intel specifies
 * that RSP is incremented *before* the effective address is calculated, which
 * the normal MC decode-then-execute flow cannot express. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument biases rSP by the pop size during the calculation. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy; only committed on full success. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the updated RSP and advance RIP only if everything worked. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3968
3969
3970/**
3971 * @opcode 0x8f
3972 */
3973FNIEMOP_DEF(iemOp_Grp1A__xop)
3974{
3975 /*
3976 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
3977 * three byte VEX prefix, except that the mmmmm field cannot have the values
3978 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
3979 */
3980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3981 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
3982 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
3983
3984 IEMOP_MNEMONIC(xop, "xop");
3985 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
3986 {
3987 /** @todo Test when exctly the XOP conformance checks kick in during
3988 * instruction decoding and fetching (using \#PF). */
3989 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
3990 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
3991 if ( ( pVCpu->iem.s.fPrefixes
3992 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
3993 == 0)
3994 {
3995 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
3996 if (bXop2 & 0x80 /* XOP.W */)
3997 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
3998 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
3999 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4000 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4001 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4002 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4003 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4004
4005 /** @todo XOP: Just use new tables and decoders. */
4006 switch (bRm & 0x1f)
4007 {
4008 case 8: /* xop opcode map 8. */
4009 IEMOP_BITCH_ABOUT_STUB();
4010 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4011
4012 case 9: /* xop opcode map 9. */
4013 IEMOP_BITCH_ABOUT_STUB();
4014 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4015
4016 case 10: /* xop opcode map 10. */
4017 IEMOP_BITCH_ABOUT_STUB();
4018 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4019
4020 default:
4021 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4022 return IEMOP_RAISE_INVALID_OPCODE();
4023 }
4024 }
4025 else
4026 Log(("XOP: Invalid prefix mix!\n"));
4027 }
4028 else
4029 Log(("XOP: XOP support disabled!\n"));
4030 return IEMOP_RAISE_INVALID_OPCODE();
4031}
4032
4033
4034/**
4035 * Common 'xchg reg,rAX' helper.
4036 */
4037FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4038{
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4040
4041 iReg |= pVCpu->iem.s.uRexB;
4042 switch (pVCpu->iem.s.enmEffOpSize)
4043 {
4044 case IEMMODE_16BIT:
4045 IEM_MC_BEGIN(0, 2);
4046 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4047 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4048 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4049 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4050 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4051 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4052 IEM_MC_ADVANCE_RIP();
4053 IEM_MC_END();
4054 return VINF_SUCCESS;
4055
4056 case IEMMODE_32BIT:
4057 IEM_MC_BEGIN(0, 2);
4058 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4059 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4060 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4061 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4062 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4063 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4064 IEM_MC_ADVANCE_RIP();
4065 IEM_MC_END();
4066 return VINF_SUCCESS;
4067
4068 case IEMMODE_64BIT:
4069 IEM_MC_BEGIN(0, 2);
4070 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4071 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4072 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4073 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4074 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4075 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4076 IEM_MC_ADVANCE_RIP();
4077 IEM_MC_END();
4078 return VINF_SUCCESS;
4079
4080 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4081 }
4082}
4083
4084
4085/**
4086 * @opcode 0x90
4087 */
4088FNIEMOP_DEF(iemOp_nop)
4089{
4090 /* R8/R8D and RAX/EAX can be exchanged. */
4091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4092 {
4093 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4094 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4095 }
4096
4097 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4098 IEMOP_MNEMONIC(pause, "pause");
4099 else
4100 IEMOP_MNEMONIC(nop, "nop");
4101 IEM_MC_BEGIN(0, 0);
4102 IEM_MC_ADVANCE_RIP();
4103 IEM_MC_END();
4104 return VINF_SUCCESS;
4105}
4106
4107
4108/**
4109 * @opcode 0x91
4110 */
4111FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4112{
4113 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4114 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4115}
4116
4117
4118/**
4119 * @opcode 0x92
4120 */
4121FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4122{
4123 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4124 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4125}
4126
4127
4128/**
4129 * @opcode 0x93
4130 */
4131FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4132{
4133 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4134 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4135}
4136
4137
4138/**
4139 * @opcode 0x94
4140 */
4141FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4142{
4143 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4144 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4145}
4146
4147
4148/**
4149 * @opcode 0x95
4150 */
4151FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4152{
4153 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4154 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4155}
4156
4157
4158/**
4159 * @opcode 0x96
4160 */
4161FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4162{
4163 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4164 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4165}
4166
4167
4168/**
4169 * @opcode 0x97
4170 */
4171FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4172{
4173 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4174 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4175}
4176
4177
4178/**
4179 * @opcode 0x98
4180 */
4181FNIEMOP_DEF(iemOp_cbw)
4182{
4183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4184 switch (pVCpu->iem.s.enmEffOpSize)
4185 {
4186 case IEMMODE_16BIT:
4187 IEMOP_MNEMONIC(cbw, "cbw");
4188 IEM_MC_BEGIN(0, 1);
4189 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4190 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4191 } IEM_MC_ELSE() {
4192 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 return VINF_SUCCESS;
4197
4198 case IEMMODE_32BIT:
4199 IEMOP_MNEMONIC(cwde, "cwde");
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4202 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4203 } IEM_MC_ELSE() {
4204 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4205 } IEM_MC_ENDIF();
4206 IEM_MC_ADVANCE_RIP();
4207 IEM_MC_END();
4208 return VINF_SUCCESS;
4209
4210 case IEMMODE_64BIT:
4211 IEMOP_MNEMONIC(cdqe, "cdqe");
4212 IEM_MC_BEGIN(0, 1);
4213 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4214 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4215 } IEM_MC_ELSE() {
4216 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4217 } IEM_MC_ENDIF();
4218 IEM_MC_ADVANCE_RIP();
4219 IEM_MC_END();
4220 return VINF_SUCCESS;
4221
4222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4223 }
4224}
4225
4226
4227/**
4228 * @opcode 0x99
4229 */
4230FNIEMOP_DEF(iemOp_cwd)
4231{
4232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4233 switch (pVCpu->iem.s.enmEffOpSize)
4234 {
4235 case IEMMODE_16BIT:
4236 IEMOP_MNEMONIC(cwd, "cwd");
4237 IEM_MC_BEGIN(0, 1);
4238 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4239 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4240 } IEM_MC_ELSE() {
4241 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4242 } IEM_MC_ENDIF();
4243 IEM_MC_ADVANCE_RIP();
4244 IEM_MC_END();
4245 return VINF_SUCCESS;
4246
4247 case IEMMODE_32BIT:
4248 IEMOP_MNEMONIC(cdq, "cdq");
4249 IEM_MC_BEGIN(0, 1);
4250 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4251 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4252 } IEM_MC_ELSE() {
4253 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4254 } IEM_MC_ENDIF();
4255 IEM_MC_ADVANCE_RIP();
4256 IEM_MC_END();
4257 return VINF_SUCCESS;
4258
4259 case IEMMODE_64BIT:
4260 IEMOP_MNEMONIC(cqo, "cqo");
4261 IEM_MC_BEGIN(0, 1);
4262 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4263 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4264 } IEM_MC_ELSE() {
4265 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4266 } IEM_MC_ENDIF();
4267 IEM_MC_ADVANCE_RIP();
4268 IEM_MC_END();
4269 return VINF_SUCCESS;
4270
4271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4272 }
4273}
4274
4275
4276/**
4277 * @opcode 0x9a
4278 */
4279FNIEMOP_DEF(iemOp_call_Ap)
4280{
4281 IEMOP_MNEMONIC(call_Ap, "call Ap");
4282 IEMOP_HLP_NO_64BIT();
4283
4284 /* Decode the far pointer address and pass it on to the far call C implementation. */
4285 uint32_t offSeg;
4286 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4287 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4288 else
4289 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4290 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4292 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4293}
4294
4295
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for pending FPU exceptions (and device-not-available
 * conditions) before continuing; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4309
4310
4311/**
4312 * @opcode 0x9c
4313 */
4314FNIEMOP_DEF(iemOp_pushf_Fv)
4315{
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4318 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4319}
4320
4321
4322/**
4323 * @opcode 0x9d
4324 */
4325FNIEMOP_DEF(iemOp_popf_Fv)
4326{
4327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4328 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4329 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4330}
4331
4332
4333/**
4334 * @opcode 0x9e
4335 */
4336FNIEMOP_DEF(iemOp_sahf)
4337{
4338 IEMOP_MNEMONIC(sahf, "sahf");
4339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4340 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4341 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4342 return IEMOP_RAISE_INVALID_OPCODE();
4343 IEM_MC_BEGIN(0, 2);
4344 IEM_MC_LOCAL(uint32_t, u32Flags);
4345 IEM_MC_LOCAL(uint32_t, EFlags);
4346 IEM_MC_FETCH_EFLAGS(EFlags);
4347 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4348 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4349 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4350 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4351 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4352 IEM_MC_COMMIT_EFLAGS(EFlags);
4353 IEM_MC_ADVANCE_RIP();
4354 IEM_MC_END();
4355 return VINF_SUCCESS;
4356}
4357
4358
4359/**
4360 * @opcode 0x9f
4361 */
4362FNIEMOP_DEF(iemOp_lahf)
4363{
4364 IEMOP_MNEMONIC(lahf, "lahf");
4365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4366 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4367 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4368 return IEMOP_RAISE_INVALID_OPCODE();
4369 IEM_MC_BEGIN(0, 1);
4370 IEM_MC_LOCAL(uint8_t, u8Flags);
4371 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4372 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4373 IEM_MC_ADVANCE_RIP();
4374 IEM_MC_END();
4375 return VINF_SUCCESS;
4376}
4377
4378
4379/**
4380 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4381 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4382 * prefixes. Will return on failures.
4383 * @param a_GCPtrMemOff The variable to store the offset in.
4384 */
4385#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4386 do \
4387 { \
4388 switch (pVCpu->iem.s.enmEffAddrMode) \
4389 { \
4390 case IEMMODE_16BIT: \
4391 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4392 break; \
4393 case IEMMODE_32BIT: \
4394 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4395 break; \
4396 case IEMMODE_64BIT: \
4397 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4398 break; \
4399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4400 } \
4401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4402 } while (0)
4403
4404/**
4405 * @opcode 0xa0
4406 */
4407FNIEMOP_DEF(iemOp_mov_AL_Ob)
4408{
4409 /*
4410 * Get the offset and fend of lock prefixes.
4411 */
4412 RTGCPTR GCPtrMemOff;
4413 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4414
4415 /*
4416 * Fetch AL.
4417 */
4418 IEM_MC_BEGIN(0,1);
4419 IEM_MC_LOCAL(uint8_t, u8Tmp);
4420 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4421 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 return VINF_SUCCESS;
4425}
4426
4427
4428/**
4429 * @opcode 0xa1
4430 */
4431FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4432{
4433 /*
4434 * Get the offset and fend of lock prefixes.
4435 */
4436 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4437 RTGCPTR GCPtrMemOff;
4438 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4439
4440 /*
4441 * Fetch rAX.
4442 */
4443 switch (pVCpu->iem.s.enmEffOpSize)
4444 {
4445 case IEMMODE_16BIT:
4446 IEM_MC_BEGIN(0,1);
4447 IEM_MC_LOCAL(uint16_t, u16Tmp);
4448 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4449 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4450 IEM_MC_ADVANCE_RIP();
4451 IEM_MC_END();
4452 return VINF_SUCCESS;
4453
4454 case IEMMODE_32BIT:
4455 IEM_MC_BEGIN(0,1);
4456 IEM_MC_LOCAL(uint32_t, u32Tmp);
4457 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4458 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4459 IEM_MC_ADVANCE_RIP();
4460 IEM_MC_END();
4461 return VINF_SUCCESS;
4462
4463 case IEMMODE_64BIT:
4464 IEM_MC_BEGIN(0,1);
4465 IEM_MC_LOCAL(uint64_t, u64Tmp);
4466 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4467 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4468 IEM_MC_ADVANCE_RIP();
4469 IEM_MC_END();
4470 return VINF_SUCCESS;
4471
4472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4473 }
4474}
4475
4476
4477/**
4478 * @opcode 0xa2
4479 */
4480FNIEMOP_DEF(iemOp_mov_Ob_AL)
4481{
4482 /*
4483 * Get the offset and fend of lock prefixes.
4484 */
4485 RTGCPTR GCPtrMemOff;
4486 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4487
4488 /*
4489 * Store AL.
4490 */
4491 IEM_MC_BEGIN(0,1);
4492 IEM_MC_LOCAL(uint8_t, u8Tmp);
4493 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4494 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4495 IEM_MC_ADVANCE_RIP();
4496 IEM_MC_END();
4497 return VINF_SUCCESS;
4498}
4499
4500
4501/**
4502 * @opcode 0xa3
4503 */
4504FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4505{
4506 /*
4507 * Get the offset and fend of lock prefixes.
4508 */
4509 RTGCPTR GCPtrMemOff;
4510 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4511
4512 /*
4513 * Store rAX.
4514 */
4515 switch (pVCpu->iem.s.enmEffOpSize)
4516 {
4517 case IEMMODE_16BIT:
4518 IEM_MC_BEGIN(0,1);
4519 IEM_MC_LOCAL(uint16_t, u16Tmp);
4520 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4521 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4522 IEM_MC_ADVANCE_RIP();
4523 IEM_MC_END();
4524 return VINF_SUCCESS;
4525
4526 case IEMMODE_32BIT:
4527 IEM_MC_BEGIN(0,1);
4528 IEM_MC_LOCAL(uint32_t, u32Tmp);
4529 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4530 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4531 IEM_MC_ADVANCE_RIP();
4532 IEM_MC_END();
4533 return VINF_SUCCESS;
4534
4535 case IEMMODE_64BIT:
4536 IEM_MC_BEGIN(0,1);
4537 IEM_MC_LOCAL(uint64_t, u64Tmp);
4538 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4539 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4540 IEM_MC_ADVANCE_RIP();
4541 IEM_MC_END();
4542 return VINF_SUCCESS;
4543
4544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4545 }
4546}
4547
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS step: loads ValBits bits from iEffSeg:xSI, stores them to
 * ES:xDI, then adds or subtracts ValBits/8 from both index registers
 * depending on EFLAGS.DF.  The AddrBits-wide index registers are zero
 * extended to 64 bits for the address calculations. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        /* DF set: string runs backwards; clear: forwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4566
4567/**
4568 * @opcode 0xa4
4569 */
4570FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4571{
4572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4573
4574 /*
4575 * Use the C implementation if a repeat prefix is encountered.
4576 */
4577 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4578 {
4579 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4580 switch (pVCpu->iem.s.enmEffAddrMode)
4581 {
4582 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4583 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4584 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4586 }
4587 }
4588 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4589
4590 /*
4591 * Sharing case implementation with movs[wdq] below.
4592 */
4593 switch (pVCpu->iem.s.enmEffAddrMode)
4594 {
4595 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4596 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4597 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4599 }
4600 return VINF_SUCCESS;
4601}
4602
4603
4604/**
4605 * @opcode 0xa5
4606 */
4607FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4608{
4609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4610
4611 /*
4612 * Use the C implementation if a repeat prefix is encountered.
4613 */
4614 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4615 {
4616 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4617 switch (pVCpu->iem.s.enmEffOpSize)
4618 {
4619 case IEMMODE_16BIT:
4620 switch (pVCpu->iem.s.enmEffAddrMode)
4621 {
4622 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4623 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4624 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4626 }
4627 break;
4628 case IEMMODE_32BIT:
4629 switch (pVCpu->iem.s.enmEffAddrMode)
4630 {
4631 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4632 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4633 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4635 }
4636 case IEMMODE_64BIT:
4637 switch (pVCpu->iem.s.enmEffAddrMode)
4638 {
4639 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4640 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4641 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4643 }
4644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4645 }
4646 }
4647 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4648
4649 /*
4650 * Annoying double switch here.
4651 * Using ugly macro for implementing the cases, sharing it with movsb.
4652 */
4653 switch (pVCpu->iem.s.enmEffOpSize)
4654 {
4655 case IEMMODE_16BIT:
4656 switch (pVCpu->iem.s.enmEffAddrMode)
4657 {
4658 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4659 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4660 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4662 }
4663 break;
4664
4665 case IEMMODE_32BIT:
4666 switch (pVCpu->iem.s.enmEffAddrMode)
4667 {
4668 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4669 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4670 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4672 }
4673 break;
4674
4675 case IEMMODE_64BIT:
4676 switch (pVCpu->iem.s.enmEffAddrMode)
4677 {
4678 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4679 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4680 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4682 }
4683 break;
4684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4685 }
4686 return VINF_SUCCESS;
4687}
4688
4689#undef IEM_MOVS_CASE
4690
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS step: fetches ValBits bits from iEffSeg:xSI and from
 * ES:xDI, runs them through the cmp assembly helper (which updates EFLAGS
 * only - uValue1 is a local, so nothing is written back), then advances or
 * retreats both index registers by ValBits/8 per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        /* DF set: string runs backwards; clear: forwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
4717
4718/**
4719 * @opcode 0xa6
4720 */
4721FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4722{
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724
4725 /*
4726 * Use the C implementation if a repeat prefix is encountered.
4727 */
4728 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4729 {
4730 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4731 switch (pVCpu->iem.s.enmEffAddrMode)
4732 {
4733 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4734 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4735 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4737 }
4738 }
4739 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4740 {
4741 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4742 switch (pVCpu->iem.s.enmEffAddrMode)
4743 {
4744 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4748 }
4749 }
4750 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4751
4752 /*
4753 * Sharing case implementation with cmps[wdq] below.
4754 */
4755 switch (pVCpu->iem.s.enmEffAddrMode)
4756 {
4757 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4758 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4759 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4761 }
4762 return VINF_SUCCESS;
4763
4764}
4765
4766
4767/**
4768 * @opcode 0xa7
4769 */
4770FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4771{
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4773
4774 /*
4775 * Use the C implementation if a repeat prefix is encountered.
4776 */
4777 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4778 {
4779 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4780 switch (pVCpu->iem.s.enmEffOpSize)
4781 {
4782 case IEMMODE_16BIT:
4783 switch (pVCpu->iem.s.enmEffAddrMode)
4784 {
4785 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4786 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4787 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4789 }
4790 break;
4791 case IEMMODE_32BIT:
4792 switch (pVCpu->iem.s.enmEffAddrMode)
4793 {
4794 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4795 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4796 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4798 }
4799 case IEMMODE_64BIT:
4800 switch (pVCpu->iem.s.enmEffAddrMode)
4801 {
4802 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4803 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4804 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4806 }
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4808 }
4809 }
4810
4811 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4812 {
4813 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4814 switch (pVCpu->iem.s.enmEffOpSize)
4815 {
4816 case IEMMODE_16BIT:
4817 switch (pVCpu->iem.s.enmEffAddrMode)
4818 {
4819 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4820 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4821 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4823 }
4824 break;
4825 case IEMMODE_32BIT:
4826 switch (pVCpu->iem.s.enmEffAddrMode)
4827 {
4828 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4829 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4830 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4832 }
4833 case IEMMODE_64BIT:
4834 switch (pVCpu->iem.s.enmEffAddrMode)
4835 {
4836 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4837 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4838 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4840 }
4841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4842 }
4843 }
4844
4845 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4846
4847 /*
4848 * Annoying double switch here.
4849 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4850 */
4851 switch (pVCpu->iem.s.enmEffOpSize)
4852 {
4853 case IEMMODE_16BIT:
4854 switch (pVCpu->iem.s.enmEffAddrMode)
4855 {
4856 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4857 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4858 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4860 }
4861 break;
4862
4863 case IEMMODE_32BIT:
4864 switch (pVCpu->iem.s.enmEffAddrMode)
4865 {
4866 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4867 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4868 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4870 }
4871 break;
4872
4873 case IEMMODE_64BIT:
4874 switch (pVCpu->iem.s.enmEffAddrMode)
4875 {
4876 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4877 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4878 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4880 }
4881 break;
4882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4883 }
4884 return VINF_SUCCESS;
4885
4886}
4887
4888#undef IEM_CMPS_CASE
4889
4890/**
4891 * @opcode 0xa8
4892 */
4893FNIEMOP_DEF(iemOp_test_AL_Ib)
4894{
4895 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
4896 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4897 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
4898}
4899
4900
4901/**
4902 * @opcode 0xa9
4903 */
4904FNIEMOP_DEF(iemOp_test_eAX_Iz)
4905{
4906 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
4907 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4908 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
4909}
4910
4911
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one STOS step: reads ValBits bits from the accumulator, stores them
 * to ES:xDI, then adds or subtracts ValBits/8 from xDI per EFLAGS.DF.
 * Note: STOS always targets ES and only touches xDI, not xSI. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        /* DF set: string runs backwards; clear: forwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
4927
4928/**
4929 * @opcode 0xaa
4930 */
4931FNIEMOP_DEF(iemOp_stosb_Yb_AL)
4932{
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4934
4935 /*
4936 * Use the C implementation if a repeat prefix is encountered.
4937 */
4938 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4939 {
4940 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
4941 switch (pVCpu->iem.s.enmEffAddrMode)
4942 {
4943 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
4944 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
4945 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
4946 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4947 }
4948 }
4949 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
4950
4951 /*
4952 * Sharing case implementation with stos[wdq] below.
4953 */
4954 switch (pVCpu->iem.s.enmEffAddrMode)
4955 {
4956 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
4957 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
4958 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
4959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4960 }
4961 return VINF_SUCCESS;
4962}
4963
4964
4965/**
4966 * @opcode 0xab
4967 */
4968FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
4969{
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4971
4972 /*
4973 * Use the C implementation if a repeat prefix is encountered.
4974 */
4975 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4976 {
4977 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
4978 switch (pVCpu->iem.s.enmEffOpSize)
4979 {
4980 case IEMMODE_16BIT:
4981 switch (pVCpu->iem.s.enmEffAddrMode)
4982 {
4983 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
4984 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
4985 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
4986 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4987 }
4988 break;
4989 case IEMMODE_32BIT:
4990 switch (pVCpu->iem.s.enmEffAddrMode)
4991 {
4992 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
4993 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
4994 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
4995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4996 }
4997 case IEMMODE_64BIT:
4998 switch (pVCpu->iem.s.enmEffAddrMode)
4999 {
5000 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5001 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5002 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5004 }
5005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5006 }
5007 }
5008 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5009
5010 /*
5011 * Annoying double switch here.
5012 * Using ugly macro for implementing the cases, sharing it with stosb.
5013 */
5014 switch (pVCpu->iem.s.enmEffOpSize)
5015 {
5016 case IEMMODE_16BIT:
5017 switch (pVCpu->iem.s.enmEffAddrMode)
5018 {
5019 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5020 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5021 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5023 }
5024 break;
5025
5026 case IEMMODE_32BIT:
5027 switch (pVCpu->iem.s.enmEffAddrMode)
5028 {
5029 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5030 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5031 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5033 }
5034 break;
5035
5036 case IEMMODE_64BIT:
5037 switch (pVCpu->iem.s.enmEffAddrMode)
5038 {
5039 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5040 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5041 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5043 }
5044 break;
5045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5046 }
5047 return VINF_SUCCESS;
5048}
5049
5050#undef IEM_STOS_CASE
5051
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one LODS step: loads ValBits bits from iEffSeg:xSI into the
 * accumulator, then adds or subtracts ValBits/8 from xSI per EFLAGS.DF.
 * Note: LODS only touches xSI, not xDI. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        /* DF set: string runs backwards; clear: forwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5067
5068/**
5069 * @opcode 0xac
5070 */
5071FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5072{
5073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5074
5075 /*
5076 * Use the C implementation if a repeat prefix is encountered.
5077 */
5078 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5079 {
5080 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5081 switch (pVCpu->iem.s.enmEffAddrMode)
5082 {
5083 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5084 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5085 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5087 }
5088 }
5089 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5090
5091 /*
5092 * Sharing case implementation with stos[wdq] below.
5093 */
5094 switch (pVCpu->iem.s.enmEffAddrMode)
5095 {
5096 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5097 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5098 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/**
5106 * @opcode 0xad
5107 */
5108FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5109{
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5111
5112 /*
5113 * Use the C implementation if a repeat prefix is encountered.
5114 */
5115 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5116 {
5117 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5118 switch (pVCpu->iem.s.enmEffOpSize)
5119 {
5120 case IEMMODE_16BIT:
5121 switch (pVCpu->iem.s.enmEffAddrMode)
5122 {
5123 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5124 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5125 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5127 }
5128 break;
5129 case IEMMODE_32BIT:
5130 switch (pVCpu->iem.s.enmEffAddrMode)
5131 {
5132 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5133 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5134 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5136 }
5137 case IEMMODE_64BIT:
5138 switch (pVCpu->iem.s.enmEffAddrMode)
5139 {
5140 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5141 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5142 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5144 }
5145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5146 }
5147 }
5148 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5149
5150 /*
5151 * Annoying double switch here.
5152 * Using ugly macro for implementing the cases, sharing it with lodsb.
5153 */
5154 switch (pVCpu->iem.s.enmEffOpSize)
5155 {
5156 case IEMMODE_16BIT:
5157 switch (pVCpu->iem.s.enmEffAddrMode)
5158 {
5159 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5160 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5161 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5163 }
5164 break;
5165
5166 case IEMMODE_32BIT:
5167 switch (pVCpu->iem.s.enmEffAddrMode)
5168 {
5169 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5170 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5171 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5173 }
5174 break;
5175
5176 case IEMMODE_64BIT:
5177 switch (pVCpu->iem.s.enmEffAddrMode)
5178 {
5179 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5180 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5181 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5183 }
5184 break;
5185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5186 }
5187 return VINF_SUCCESS;
5188}
5189
5190#undef IEM_LODS_CASE
5191
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one SCAS step: fetches ValBits bits from ES:xDI, compares the
 * accumulator against it via the cmp assembly helper (EFLAGS only; rAX is
 * passed by reference but cmp does not modify the destination), then
 * advances or retreats xDI by ValBits/8 per EFLAGS.DF.
 * Note: SCAS always uses ES and only touches xDI, not xSI. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        /* DF set: string runs backwards; clear: forwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5213
5214/**
5215 * @opcode 0xae
5216 */
5217FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5218{
5219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5220
5221 /*
5222 * Use the C implementation if a repeat prefix is encountered.
5223 */
5224 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5225 {
5226 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5227 switch (pVCpu->iem.s.enmEffAddrMode)
5228 {
5229 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5230 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5231 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5233 }
5234 }
5235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5236 {
5237 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5238 switch (pVCpu->iem.s.enmEffAddrMode)
5239 {
5240 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5241 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5242 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5244 }
5245 }
5246 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5247
5248 /*
5249 * Sharing case implementation with stos[wdq] below.
5250 */
5251 switch (pVCpu->iem.s.enmEffAddrMode)
5252 {
5253 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5254 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5255 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5257 }
5258 return VINF_SUCCESS;
5259}
5260
5261
5262/**
5263 * @opcode 0xaf
5264 */
5265FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5266{
5267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5268
5269 /*
5270 * Use the C implementation if a repeat prefix is encountered.
5271 */
5272 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5273 {
5274 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5275 switch (pVCpu->iem.s.enmEffOpSize)
5276 {
5277 case IEMMODE_16BIT:
5278 switch (pVCpu->iem.s.enmEffAddrMode)
5279 {
5280 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5281 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5282 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5284 }
5285 break;
5286 case IEMMODE_32BIT:
5287 switch (pVCpu->iem.s.enmEffAddrMode)
5288 {
5289 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5290 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5291 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5293 }
5294 case IEMMODE_64BIT:
5295 switch (pVCpu->iem.s.enmEffAddrMode)
5296 {
5297 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5298 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5299 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5301 }
5302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5303 }
5304 }
5305 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5306 {
5307 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5308 switch (pVCpu->iem.s.enmEffOpSize)
5309 {
5310 case IEMMODE_16BIT:
5311 switch (pVCpu->iem.s.enmEffAddrMode)
5312 {
5313 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5314 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5315 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5317 }
5318 break;
5319 case IEMMODE_32BIT:
5320 switch (pVCpu->iem.s.enmEffAddrMode)
5321 {
5322 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5323 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5324 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5326 }
5327 case IEMMODE_64BIT:
5328 switch (pVCpu->iem.s.enmEffAddrMode)
5329 {
5330 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5331 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5332 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5334 }
5335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5336 }
5337 }
5338 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5339
5340 /*
5341 * Annoying double switch here.
5342 * Using ugly macro for implementing the cases, sharing it with scasb.
5343 */
5344 switch (pVCpu->iem.s.enmEffOpSize)
5345 {
5346 case IEMMODE_16BIT:
5347 switch (pVCpu->iem.s.enmEffAddrMode)
5348 {
5349 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5350 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5351 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354 break;
5355
5356 case IEMMODE_32BIT:
5357 switch (pVCpu->iem.s.enmEffAddrMode)
5358 {
5359 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5360 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5361 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5363 }
5364 break;
5365
5366 case IEMMODE_64BIT:
5367 switch (pVCpu->iem.s.enmEffAddrMode)
5368 {
5369 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5370 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5371 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5373 }
5374 break;
5375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5376 }
5377 return VINF_SUCCESS;
5378}
5379
5380#undef IEM_SCAS_CASE
5381
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit general
 * purpose register.
 *
 * @param   iReg    Destination register index (the caller has already folded
 *                  in any REX.B adjustment).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK with mov r8,imm8 is invalid. */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
5398
5399
/**
 * @opcode 0xb0
 * mov AL,Ib - base register index 0; REX.B selects R8L instead.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5408
5409
/**
 * @opcode 0xb1
 * mov CL,Ib - base register index 1; REX.B selects R9L instead.
 * (Function name lacks the mov_ prefix used by 0xb0/0xb4; it is referenced
 * from the opcode table, so it is kept as-is.)
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5418
5419
/**
 * @opcode 0xb2
 * mov DL,Ib - base register index 2; REX.B selects R10L instead.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5428
5429
/**
 * @opcode 0xb3
 * mov BL,Ib - base register index 3; REX.B selects R11L instead.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5438
5439
/**
 * @opcode 0xb4
 * mov AH,Ib - encoding index 4 (X86_GREG_xSP).  Without a REX prefix this is
 * AH; with REX it is SPL/R12L - presumably the high-byte vs. low-byte mapping
 * is resolved inside the GREG_U8 access macros (NOTE(review): confirm).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5448
5449
/**
 * @opcode 0xb5
 * mov CH,Ib - encoding index 5 (X86_GREG_xBP); CH without REX, BPL/R13L with.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5458
5459
/**
 * @opcode 0xb6
 * mov DH,Ib - encoding index 6 (X86_GREG_xSI); DH without REX, SIL/R14L with.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5468
5469
/**
 * @opcode 0xb7
 * mov BH,Ib - encoding index 7 (X86_GREG_xDI); BH without REX, DIL/R15L with.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5478
5479
5480/**
5481 * Common 'mov regX,immX' helper.
5482 */
5483FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5484{
5485 switch (pVCpu->iem.s.enmEffOpSize)
5486 {
5487 case IEMMODE_16BIT:
5488 {
5489 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5491
5492 IEM_MC_BEGIN(0, 1);
5493 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5494 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5495 IEM_MC_ADVANCE_RIP();
5496 IEM_MC_END();
5497 break;
5498 }
5499
5500 case IEMMODE_32BIT:
5501 {
5502 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5504
5505 IEM_MC_BEGIN(0, 1);
5506 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5507 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 break;
5511 }
5512 case IEMMODE_64BIT:
5513 {
5514 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5516
5517 IEM_MC_BEGIN(0, 1);
5518 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5519 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5520 IEM_MC_ADVANCE_RIP();
5521 IEM_MC_END();
5522 break;
5523 }
5524 }
5525
5526 return VINF_SUCCESS;
5527}
5528
5529
/**
 * @opcode 0xb8
 * mov rAX,Iv - base register index 0; REX.B selects r8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5538
5539
/**
 * @opcode 0xb9
 * mov rCX,Iv - base register index 1; REX.B selects r9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5548
5549
/**
 * @opcode 0xba
 * mov rDX,Iv - base register index 2; REX.B selects r10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5558
5559
/**
 * @opcode 0xbb
 * mov rBX,Iv - base register index 3; REX.B selects r11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5568
5569
/**
 * @opcode 0xbc
 * mov rSP,Iv - base register index 4; REX.B selects r12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5578
5579
/**
 * @opcode 0xbd
 * mov rBP,Iv - base register index 5; REX.B selects r13.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5588
5589
/**
 * @opcode 0xbe
 * mov rSI,Iv - base register index 6; REX.B selects r14.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5598
5599
/**
 * @opcode 0xbf
 * mov rDI,Iv - base register index 7; REX.B selects r15.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5608
5609
/**
 * @opcode 0xc0
 *
 * Group 2 byte rotates/shifts with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,Ib.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD here.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* imm8-count forms first appeared on the 186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations (verification mode). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - the imm8 count is fetched after the effective address bytes. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of imm follows the ModR/M bytes. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5671
5672
/**
 * @opcode 0xc1
 *
 * Group 2 word/dword/qword rotates/shifts with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD here.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* imm8-count forms first appeared on the 186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations (verification mode). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the imm8 count is fetched after the effective address bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of imm follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of imm follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of imm follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5814
5815
/**
 * @opcode 0xc2
 * retn Iw - near return, popping Iw extra bytes off the stack afterwards.
 * Defaults to 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5827
5828
/**
 * @opcode 0xc3
 * retn - near return.  Same as 0xc2 with a zero pop count.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5839
5840
5841/**
5842 * @opcode 0xc4
5843 */
5844FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5845{
5846 /* The LES instruction is invalid 64-bit mode. In legacy and
5847 compatability mode it is invalid with MOD=3.
5848 The use as a VEX prefix is made possible by assigning the inverted
5849 REX.R to the top MOD bit, and the top bit in the inverted register
5850 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5851 to accessing registers 0..7 in this VEX form. */
5852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5853 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5854 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5855 {
5856 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5857 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5858 {
5859 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5860 if ( ( pVCpu->iem.s.fPrefixes
5861 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5862 == 0)
5863 {
5864 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5865 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5866 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5867 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5868 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5869
5870 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5871 }
5872
5873 Log(("VEX2: Invalid prefix mix!\n"));
5874 }
5875 else
5876 Log(("VEX2: AVX support disabled!\n"));
5877
5878 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5879 return IEMOP_RAISE_INVALID_OPCODE();
5880 }
5881 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5882 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5883}
5884
5885
5886/**
5887 * @opcode 0xc5
5888 */
5889FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
5890{
5891 /* The LDS instruction is invalid 64-bit mode. In legacy and
5892 compatability mode it is invalid with MOD=3.
5893 The use as a VEX prefix is made possible by assigning the inverted
5894 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5895 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5897 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5898 {
5899 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5900 {
5901 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5902 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5903 }
5904 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5905 }
5906
5907 IEMOP_MNEMONIC(vex3_prefix, "vex3");
5908 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5909 {
5910 /** @todo Test when exctly the VEX conformance checks kick in during
5911 * instruction decoding and fetching (using \#PF). */
5912 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5913 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5914 if ( ( pVCpu->iem.s.fPrefixes
5915 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5916 == 0)
5917 {
5918 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5919 if (bVex2 & 0x80 /* VEX.W */)
5920 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5921 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5922 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
5923 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
5924 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
5925 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
5926 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
5927
5928 switch (bRm & 0x1f)
5929 {
5930 case 1: /* 0x0f lead opcode byte. */
5931 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5932
5933 case 2: /* 0x0f 0x38 lead opcode bytes. */
5934 /** @todo VEX: Just use new tables and decoders. */
5935 IEMOP_BITCH_ABOUT_STUB();
5936 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5937
5938 case 3: /* 0x0f 0x3a lead opcode bytes. */
5939 /** @todo VEX: Just use new tables and decoders. */
5940 IEMOP_BITCH_ABOUT_STUB();
5941 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5942
5943 default:
5944 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5945 return IEMOP_RAISE_INVALID_OPCODE();
5946 }
5947 }
5948 else
5949 Log(("VEX3: Invalid prefix mix!\n"));
5950 }
5951 else
5952 Log(("VEX3: AVX support disabled!\n"));
5953 return IEMOP_RAISE_INVALID_OPCODE();
5954}
5955
5956
/**
 * @opcode 0xc6
 * Group 11: only /0 (mov Eb,Ib) is defined; all other reg values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the imm8 is fetched after the effective address bytes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of imm follows. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5991
5992
/**
 * @opcode 0xc7
 * Group 11: only /0 (mov Ev,Iz) is defined; all other reg values raise \#UD.
 * In 64-bit operand size the imm32 is sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - the immediate follows the effective address bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 bytes of imm follow. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 bytes of imm follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 bytes of imm follow (imm32, sign-extended). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6080
6081
6082
6083
/**
 * @opcode 0xc8
 * enter Iw,Ib - create stack frame of cbFrame bytes with the given nesting
 * level.  186+ instruction; defaults to 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6097
6098
/**
 * @opcode 0xc9
 * leave - tear down the current stack frame.  186+ instruction; defaults to
 * 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6110
6111
/**
 * @opcode 0xca
 * retf Iw - far return, popping Iw extra bytes off the stack afterwards.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6123
6124
/**
 * @opcode 0xcb
 * retf - far return.  Same as 0xca with a zero pop count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6135
6136
/**
 * @opcode 0xcc
 * int3 - breakpoint trap; dispatched as INT 3 with the fIsBpInstr flag set.
 * (NOTE(review): no IEMOP_MNEMONIC here, unlike the sibling opcodes.)
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6145
6146
/**
 * @opcode 0xcd
 * int Ib - software interrupt with an explicit vector number.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6156
6157
/**
 * @opcode 0xce
 * into - raise \#OF if the overflow flag is set.  Invalid in 64-bit mode.
 * Dispatched unconditionally to iemCImpl_int with vector X86_XCPT_OF;
 * presumably the EFLAGS.OF check happens inside the CIMPL handler
 * (NOTE(review): confirm).
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6173
6174
/**
 * @opcode 0xcf
 * iret - interrupt return with the current effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6184
6185
/**
 * @opcode 0xd0
 *
 * Group 2 byte rotates/shifts by a constant count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD here.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these operations (verification mode). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no imm bytes follow. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6243
6244
6245
6246/**
6247 * @opcode 0xd1
6248 */
6249FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6250{
6251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6252 PCIEMOPSHIFTSIZES pImpl;
6253 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6254 {
6255 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6256 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6257 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6258 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6259 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6260 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6261 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6262 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6263 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6264 }
6265 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6266
6267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6268 {
6269 /* register */
6270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6271 switch (pVCpu->iem.s.enmEffOpSize)
6272 {
6273 case IEMMODE_16BIT:
6274 IEM_MC_BEGIN(3, 0);
6275 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6276 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6278 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6279 IEM_MC_REF_EFLAGS(pEFlags);
6280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6281 IEM_MC_ADVANCE_RIP();
6282 IEM_MC_END();
6283 return VINF_SUCCESS;
6284
6285 case IEMMODE_32BIT:
6286 IEM_MC_BEGIN(3, 0);
6287 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6288 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6289 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6290 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6291 IEM_MC_REF_EFLAGS(pEFlags);
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6293 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6294 IEM_MC_ADVANCE_RIP();
6295 IEM_MC_END();
6296 return VINF_SUCCESS;
6297
6298 case IEMMODE_64BIT:
6299 IEM_MC_BEGIN(3, 0);
6300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6301 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6302 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6303 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6304 IEM_MC_REF_EFLAGS(pEFlags);
6305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6306 IEM_MC_ADVANCE_RIP();
6307 IEM_MC_END();
6308 return VINF_SUCCESS;
6309
6310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6311 }
6312 }
6313 else
6314 {
6315 /* memory */
6316 switch (pVCpu->iem.s.enmEffOpSize)
6317 {
6318 case IEMMODE_16BIT:
6319 IEM_MC_BEGIN(3, 2);
6320 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6321 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6322 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6324
6325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6327 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6328 IEM_MC_FETCH_EFLAGS(EFlags);
6329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6330
6331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6332 IEM_MC_COMMIT_EFLAGS(EFlags);
6333 IEM_MC_ADVANCE_RIP();
6334 IEM_MC_END();
6335 return VINF_SUCCESS;
6336
6337 case IEMMODE_32BIT:
6338 IEM_MC_BEGIN(3, 2);
6339 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6340 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6341 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6343
6344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6346 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6347 IEM_MC_FETCH_EFLAGS(EFlags);
6348 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6349
6350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6351 IEM_MC_COMMIT_EFLAGS(EFlags);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 case IEMMODE_64BIT:
6357 IEM_MC_BEGIN(3, 2);
6358 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6359 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6360 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6362
6363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6365 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6366 IEM_MC_FETCH_EFLAGS(EFlags);
6367 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6368
6369 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6370 IEM_MC_COMMIT_EFLAGS(EFlags);
6371 IEM_MC_ADVANCE_RIP();
6372 IEM_MC_END();
6373 return VINF_SUCCESS;
6374
6375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6376 }
6377 }
6378}
6379
6380
/**
 * @opcode 0xd2
 *
 * Group 2: rotate/shift r/m8 by the count in CL.  The ModR/M reg field
 * selects the operation; /6 is an invalid encoding and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF come out architecturally undefined for these operations;
       let the verifier know so it doesn't flag mismatches. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: reference the GPR directly. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,    0);
        IEM_MC_ARG(uint8_t,         cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory destination: calc address, map R/W, operate, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6440
6441
/**
 * @opcode 0xd3
 *
 * Group 2: rotate/shift r/m16/32/64 by the count in CL.  The ModR/M reg
 * field selects the operation; /6 is an invalid encoding and raises \#UD.
 * The operand size switch picks the 16/32/64-bit worker.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF come out architecturally undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: reference the GPR directly. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: calc address, map R/W, operate, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6581
/**
 * @opcode 0xd4
 *
 * AAM - ASCII adjust AX after multiply.  The immediate is the divisor
 * (normally 10); zero raises \#DE.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM 0 divides by zero. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6595
6596
/**
 * @opcode 0xd5
 *
 * AAD - ASCII adjust AX before division.  The immediate is the base
 * (normally 10).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6608
6609
6610/**
6611 * @opcode 0xd6
6612 */
6613FNIEMOP_DEF(iemOp_salc)
6614{
6615 IEMOP_MNEMONIC(salc, "salc");
6616 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6619 IEMOP_HLP_NO_64BIT();
6620
6621 IEM_MC_BEGIN(0, 0);
6622 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6623 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6624 } IEM_MC_ELSE() {
6625 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6626 } IEM_MC_ENDIF();
6627 IEM_MC_ADVANCE_RIP();
6628 IEM_MC_END();
6629 return VINF_SUCCESS;
6630}
6631
6632
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [seg:(r/e)BX + zero-extended AL].  The
 * effective address width follows the current address-size mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* base = BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* index = AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* base = EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* index = AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* base = RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6681
6682
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * On stack underflow (either register empty) the underflow handling for ST0
 * is performed instead of calling the worker.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6713
6714
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The worker produces an FSW value which is merged into the FPU status word;
 * no stack register is written.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6745
6746
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except that the stack is popped
 * after the FSW update (also on underflow).
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6777
6778
/** Opcode 0xd8 11/0.
 * fadd st0,stN: ST0 = ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6785
6786
/** Opcode 0xd8 11/1.
 * fmul st0,stN: ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6793
6794
/** Opcode 0xd8 11/2.
 * fcom st0,stN: compare ST0 with STn, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6801
6802
/** Opcode 0xd8 11/3.
 * fcomp st0,stN: same as fcom but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6809
6810
/** Opcode 0xd8 11/4.
 * fsub st0,stN: ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6817
6818
/** Opcode 0xd8 11/5.
 * fsubr st0,stN: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6825
6826
/** Opcode 0xd8 11/6.
 * fdiv st0,stN: ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6833
6834
/** Opcode 0xd8 11/7.
 * fdivr st0,stN: ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6841
6842
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory before touching the FPU
 * state; the assembly worker takes care of converting it.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6878
6879
/** Opcode 0xd8 !11/0.
 * fadd st0,m32r: ST0 = ST0 + m32 real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6886
6887
/** Opcode 0xd8 !11/1.
 * fmul st0,m32r: ST0 = ST0 * m32 real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6894
6895
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r: compare ST0 with an m32 real; only FSW is updated, so this
 * does not go through iemOpHlpFpu_st0_m32r. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6928
6929
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r: like fcom st0,m32r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6962
6963
/** Opcode 0xd8 !11/4.
 * fsub st0,m32r: ST0 = ST0 - m32 real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
6970
6971
/** Opcode 0xd8 !11/5.
 * fsubr st0,m32r: ST0 = m32 real - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
6978
6979
/** Opcode 0xd8 !11/6.
 * fdiv st0,m32r: ST0 = ST0 / m32 real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
6986
6987
/** Opcode 0xd8 !11/7.
 * fdivr st0,m32r: ST0 = m32 real / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6994
6995
/**
 * @opcode 0xd8
 *
 * First x87 escape opcode.  Register forms (mod == 3) operate on ST0/STn;
 * memory forms operate on ST0 and an m32 real operand.  The reg field of
 * ModR/M selects the sub-instruction.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 opcode bits + ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7035
7036
/** Opcode 0xd9 /0 mem32real
 * fld m32r: push an m32 real onto the FPU stack (converted to 80-bit).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 must be free for the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7069
7070
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: store ST0 to memory as an m32 real (no pop).  On stack
 * underflow with \#IS masked, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced (unmasked exceptions). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* invalid-op masked: write the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7105
7106
/** Opcode 0xd9 !11/3
 * fstp m32r: store ST0 to memory as an m32 real and pop the stack.
 * Identical to fst m32r except for the popping variants of the FSW macros. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced (unmasked exceptions). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* invalid-op masked: write the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7141
7142
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte: load the FPU environment (CW, SW, TW, FIP, FDP, FOP)
 * from memory; layout depends on operand size.  Deferred to a C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7160
7161
7162/** Opcode 0xd9 !11/5 */
7163FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7164{
7165 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7166 IEM_MC_BEGIN(1, 1);
7167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7168 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7171 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7172 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7173 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7174 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7175 IEM_MC_END();
7176 return VINF_SUCCESS;
7177}
7178
7179
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte: store the FPU environment to memory (no pending
 * exception check - the "no-wait" form).  Deferred to a C worker. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7197
7198
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: store the FPU control word to memory (no pending
 * exception check - the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7216
7217
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * fnop: FPU no-operation; still updates FOP/FPUIP like other FPU insns. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7235
7236
/** Opcode 0xd9 11/0 stN
 * fld stN: push a copy of STn onto the stack (STn taken relative to the
 * pre-push top). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7264
7265
/** Opcode 0xd9 11/3 stN
 * fxch stN: exchange ST0 and STn.  The empty-register case is handled by a
 * C worker (underflow semantics differ between vendors). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both valid: STn's old value goes to ST0 (with C1 set), ST0's to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7296
7297
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * fstp st0,stN: copy ST0 to STn and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (keeping FSW clean). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST0 into the destination register, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7344
7345
7346/**
7347 * Common worker for FPU instructions working on ST0 and replaces it with the
7348 * result, i.e. unary operators.
7349 *
7350 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7351 */
7352FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7353{
7354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7355
7356 IEM_MC_BEGIN(2, 1);
7357 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7358 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7359 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7360
7361 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7362 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7363 IEM_MC_PREPARE_FPU_USAGE();
7364 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7365 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7366 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7367 IEM_MC_ELSE()
7368 IEM_MC_FPU_STACK_UNDERFLOW(0);
7369 IEM_MC_ENDIF();
7370 IEM_MC_ADVANCE_RIP();
7371
7372 IEM_MC_END();
7373 return VINF_SUCCESS;
7374}
7375
7376
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    /* FCHS: unary operation replacing ST(0) with its result (sign change). */
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7383
7384
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    /* FABS: unary operation replacing ST(0) with its absolute value. */
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7391
7392
7393/**
7394 * Common worker for FPU instructions working on ST0 and only returns FSW.
7395 *
7396 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7397 */
7398FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7399{
7400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7401
7402 IEM_MC_BEGIN(2, 1);
7403 IEM_MC_LOCAL(uint16_t, u16Fsw);
7404 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7405 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7406
7407 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7408 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7409 IEM_MC_PREPARE_FPU_USAGE();
7410 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7411 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7412 IEM_MC_UPDATE_FSW(u16Fsw);
7413 IEM_MC_ELSE()
7414 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7415 IEM_MC_ENDIF();
7416 IEM_MC_ADVANCE_RIP();
7417
7418 IEM_MC_END();
7419 return VINF_SUCCESS;
7420}
7421
7422
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    /* FTST: examine ST(0), result delivered via the FSW condition codes only. */
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7429
7430
/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    /* FXAM: classify ST(0), result delivered via the FSW condition codes only. */
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7437
7438
7439/**
7440 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7441 *
7442 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7443 */
7444FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7445{
7446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7447
7448 IEM_MC_BEGIN(1, 1);
7449 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7450 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7451
7452 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7453 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7454 IEM_MC_PREPARE_FPU_USAGE();
7455 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7456 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7457 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7458 IEM_MC_ELSE()
7459 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7460 IEM_MC_ENDIF();
7461 IEM_MC_ADVANCE_RIP();
7462
7463 IEM_MC_END();
7464 return VINF_SUCCESS;
7465}
7466
7467
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    /* FLD1: push the constant +1.0 onto the FPU stack. */
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7474
7475
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    /* FLDL2T: push the constant log2(10) onto the FPU stack. */
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7482
7483
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    /* FLDL2E: push the constant log2(e) onto the FPU stack. */
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7490
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    /* FLDPI: push the constant pi onto the FPU stack. */
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7497
7498
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    /* FLDLG2: push the constant log10(2) onto the FPU stack. */
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7505
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    /* FLDLN2: push the constant ln(2) onto the FPU stack. */
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7512
7513
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    /* FLDZ: push the constant +0.0 onto the FPU stack. */
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7520
7521
/** Opcode 0xd9 0xf0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    /* F2XM1: unary operation replacing ST(0) with its result (2^x - 1). */
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7528
7529
7530/**
7531 * Common worker for FPU instructions working on STn and ST0, storing the result
7532 * in STn, and popping the stack unless IE, DE or ZE was raised.
7533 *
7534 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7535 */
7536FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7537{
7538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7539
7540 IEM_MC_BEGIN(3, 1);
7541 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7542 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7543 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7544 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7545
7546 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7547 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7548
7549 IEM_MC_PREPARE_FPU_USAGE();
7550 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7551 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7552 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7553 IEM_MC_ELSE()
7554 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7555 IEM_MC_ENDIF();
7556 IEM_MC_ADVANCE_RIP();
7557
7558 IEM_MC_END();
7559 return VINF_SUCCESS;
7560}
7561
7562
7563/** Opcode 0xd9 0xf1. */
7564FNIEMOP_DEF(iemOp_fyl2x)
7565{
7566 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
7567 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
7568}
7569
7570
7571/**
7572 * Common worker for FPU instructions working on ST0 and having two outputs, one
7573 * replacing ST0 and one pushed onto the stack.
7574 *
7575 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7576 */
7577FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7578{
7579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7580
7581 IEM_MC_BEGIN(2, 1);
7582 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7583 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7584 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7585
7586 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7587 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7588 IEM_MC_PREPARE_FPU_USAGE();
7589 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7590 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7591 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7592 IEM_MC_ELSE()
7593 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7594 IEM_MC_ENDIF();
7595 IEM_MC_ADVANCE_RIP();
7596
7597 IEM_MC_END();
7598 return VINF_SUCCESS;
7599}
7600
7601
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* FPTAN: two-output operation - replaces ST(0) and pushes a second value. */
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7608
7609
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* FPATAN: operates on ST(1) and ST(0), stores into ST(1) and pops. */
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7616
7617
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* FXTRACT: two-output operation - replaces ST(0) and pushes a second value. */
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7624
7625
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* FPREM1: ST(0) op ST(1), result into ST(0) via the common worker. */
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7632
7633
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* FDECSTP: decrement the FPU stack top pointer; no data is moved. */
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7656
7657
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* FINCSTP: increment the FPU stack top pointer; no data is moved. */
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7680
7681
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* FPREM: ST(0) op ST(1), result into ST(0) via the common worker. */
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7688
7689
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* FYL2XP1: operates on ST(1) and ST(0), stores into ST(1) and pops. */
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7696
7697
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* FSQRT: unary operation replacing ST(0) with its result. */
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7704
7705
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* FSINCOS: two-output operation - replaces ST(0) and pushes a second value. */
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7712
7713
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    /* FRNDINT: unary operation replacing ST(0) with its result. */
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7720
7721
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    /* FSCALE: ST(0) op ST(1), result into ST(0) via the common worker. */
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7728
7729
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    /* FSIN: unary operation replacing ST(0) with its result. */
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7736
7737
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    /* FCOS: unary operation replacing ST(0) with its result. */
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7744
7745
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 ModR/M bytes 0xe0 thru 0xff,
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7782
7783
7784/**
7785 * @opcode 0xd9
7786 */
7787FNIEMOP_DEF(iemOp_EscF1)
7788{
7789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7790 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
7791
7792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7793 {
7794 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7795 {
7796 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
7797 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
7798 case 2:
7799 if (bRm == 0xd0)
7800 return FNIEMOP_CALL(iemOp_fnop);
7801 return IEMOP_RAISE_INVALID_OPCODE();
7802 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
7803 case 4:
7804 case 5:
7805 case 6:
7806 case 7:
7807 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
7808 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
7809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7810 }
7811 }
7812 else
7813 {
7814 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7815 {
7816 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
7817 case 1: return IEMOP_RAISE_INVALID_OPCODE();
7818 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
7819 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
7820 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
7821 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
7822 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
7823 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
7824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7825 }
7826 }
7827}
7828
7829
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copy ST(i) into ST(0) when CF is set (below). */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7856
7857
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copy ST(i) into ST(0) when ZF is set (equal). */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7884
7885
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copy ST(i) into ST(0) when CF or ZF is set (below or equal). */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7912
7913
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copy ST(i) into ST(0) when PF is set (unordered). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7940
7941
7942/**
7943 * Common worker for FPU instructions working on ST0 and STn, only affecting
7944 * flags, and popping twice when done.
7945 *
7946 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7947 */
7948FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7949{
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951
7952 IEM_MC_BEGIN(3, 1);
7953 IEM_MC_LOCAL(uint16_t, u16Fsw);
7954 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7955 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7956 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7957
7958 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7959 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7960
7961 IEM_MC_PREPARE_FPU_USAGE();
7962 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
7963 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7964 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
7965 IEM_MC_ELSE()
7966 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
7967 IEM_MC_ENDIF();
7968 IEM_MC_ADVANCE_RIP();
7969
7970 IEM_MC_END();
7971 return VINF_SUCCESS;
7972}
7973
7974
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare ST(0) with ST(1), then pop twice. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7981
7982
7983/**
7984 * Common worker for FPU instructions working on ST0 and an m32i, and storing
7985 * the result in ST0.
7986 *
7987 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7988 */
7989FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
7990{
7991 IEM_MC_BEGIN(3, 3);
7992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7993 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7994 IEM_MC_LOCAL(int32_t, i32Val2);
7995 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7996 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7997 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
7998
7999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8001
8002 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8003 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8004 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8005
8006 IEM_MC_PREPARE_FPU_USAGE();
8007 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8008 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8009 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8010 IEM_MC_ELSE()
8011 IEM_MC_FPU_STACK_UNDERFLOW(0);
8012 IEM_MC_ENDIF();
8013 IEM_MC_ADVANCE_RIP();
8014
8015 IEM_MC_END();
8016 return VINF_SUCCESS;
8017}
8018
8019
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32i: ST(0) += m32int, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8026
8027
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32i: ST(0) *= m32int, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8034
8035
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32i: compare ST(0) with a 32-bit integer; only FSW is updated. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8068
8069
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32i: like FICOM m32i but pops the FPU stack afterwards. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8102
8103
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32i: ST(0) -= m32int, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8110
8111
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32i: reversed subtract, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8118
8119
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32i: ST(0) /= m32int, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8126
8127
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32i: reversed divide, via the common st0/m32i worker. */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8134
8135
8136/**
8137 * @opcode 0xda
8138 */
8139FNIEMOP_DEF(iemOp_EscF2)
8140{
8141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8142 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8144 {
8145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8146 {
8147 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8148 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8149 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8150 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8151 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8152 case 5:
8153 if (bRm == 0xe9)
8154 return FNIEMOP_CALL(iemOp_fucompp);
8155 return IEMOP_RAISE_INVALID_OPCODE();
8156 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8157 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8159 }
8160 }
8161 else
8162 {
8163 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8164 {
8165 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8166 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8167 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8168 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8169 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8170 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8171 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8172 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8174 }
8175 }
8176}
8177
8178
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32i: load a 32-bit integer from memory and push it onto the
       FPU stack (converted to 80-bit real by the assembly worker). */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* New top register occupied: push overflow, recording the mem operand. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8210
8211
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32i: store ST(0) to memory as a 32-bit integer using
       truncation, then pop the FPU stack. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state, so
       memory faults are raised first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8246
8247
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32i: store ST(0) to memory as a 32-bit integer (no pop). */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state, so
       memory faults are raised first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8282
8283
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32i: store ST(0) to memory as a 32-bit integer, then pop. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state, so
       memory faults are raised first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8318
8319
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80r: load an 80-bit real from memory and push it onto the FPU
       stack. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* New top register occupied: push overflow, recording the mem operand. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8351
8352
/** Opcode 0xdb !11/7.
 * FSTP m80real - stores ST(0) to memory as an 80-bit real and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if the invalid-op exception is masked, store QNaN; then
           report stack underflow (still popping). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8387
8388
/** Opcode 0xdb 11/0.
 * FCMOVNB - copies ST(i) to ST(0) if EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8415
8416
/** Opcode 0xdb 11/1.
 * FCMOVNE - copies ST(i) to ST(0) if EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8443
8444
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copies ST(i) to ST(0) if both EFLAGS.CF and EFLAGS.ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8471
8472
/** Opcode 0xdb 11/3.
 * FCMOVNU - copies ST(i) to ST(0) if EFLAGS.PF is clear (i.e. not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8499
8500
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt enable; a no-op on later FPUs, emulated as such here
 * (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt disable; a no-op on later FPUs, emulated as such
 * here (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8525
8526
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception flags in FSW without checking for pending
 * unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8541
8542
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU without checking for pending unmasked
 * exceptions; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8550
8551
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode"; raises \#UD here since newer CPUs
 * treat it as an invalid opcode (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8580
8581
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8596
8597
/**
 * @opcode 0xdb
 *
 * FPU escape 0xdb decoder: dispatches on the ModR/M byte.  Register forms
 * (mod == 3) select FCMOVcc / control ops / F(U)COMI by /reg (with /4 further
 * decoded by the full ModR/M byte); memory forms select the m32i and m80r
 * operations.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE use. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes several distinct control instructions (0xe0..0xe7). */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8649
8650
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; bits 0-2 select ST(i), the destination.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(i) is operand 1 and the destination; ST(0) is operand 2. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8682
8683
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - the result lands in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8730
8731
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8766
8767
/** Opcode 0xdc !11/0.
 * FADD m64real - ST(0) += the 64-bit real at the memory operand. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL m64real - ST(0) *= the 64-bit real at the memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8782
8783
/** Opcode 0xdc !11/2.
 * FCOM m64real - compares ST(0) with a 64-bit real from memory; updates FSW
 * only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8816
8817
/** Opcode 0xdc !11/3.
 * FCOMP m64real - like FCOM m64real but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8850
8851
/** Opcode 0xdc !11/4.
 * FSUB m64real - ST(0) -= the 64-bit real at the memory operand. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR m64real - ST(0) = m64real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV m64real - ST(0) /= the 64-bit real at the memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR m64real - ST(0) = m64real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8882
8883
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc decoder: register forms operate on ST(i),ST(0); memory
 * forms take a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE use. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8922
8923
/** Opcode 0xdd !11/0.
 * FLD m64real - converts a 64-bit real from memory to 80-bit and pushes it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Push requires ST(7) to be free; otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8955
8956
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST(0) to memory as a truncated 64-bit integer and
 * pops the stack.  (Doc comment previously said !11/0; see the /reg dispatch
 * in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if IM is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8991
8992
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST(0) to memory as a 64-bit real; no pop.  (Doc
 * comment previously said !11/0; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if IM is masked, store QNaN; then report underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9027
9028
9029
9030
/** Opcode 0xdd !11/3.
 * FSTP m64real - stores ST(0) to memory as a 64-bit real and pops the stack.
 * (Doc comment previously said !11/0; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if IM is masked, store QNaN; underflow still pops. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9065
9066
/** Opcode 0xdd !11/4.
 * FRSTOR - restores the full FPU state from a 94/108-byte memory image;
 * deferred to the C implementation.  (Doc comment previously said !11/0; see
 * the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9084
9085
/** Opcode 0xdd !11/6.
 * FNSAVE - saves the full FPU state to a 94/108-byte memory image without
 * checking for pending unmasked exceptions; deferred to the C implementation.
 * (Doc comment previously said !11/0; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9104
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand without
 * checking for pending unmasked exceptions.  (Doc comment previously said
 * !11/0; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9129
9130
/** Opcode 0xdd 11/0.
 * FFREE - marks ST(i) as empty in the FPU tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9152
9153
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) into ST(i); no pop.  (Doc comment previously said
 * 11/1; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9178
9179
/** Opcode 0xdd 11/4.
 * FUCOM - unordered compare of ST(0) with ST(i); no pop.  (Doc comment
 * previously said 11/3; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5.
 * FUCOMP - unordered compare of ST(0) with ST(i), then pop.  (Doc comment
 * previously said 11/4; see the /reg dispatch in iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9194
9195
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd decoder: register forms are FFREE/FST/FSTP/FUCOM(P) etc.;
 * memory forms handle m64real loads/stores, FISTTP m64i, FRSTOR, FNSAVE and
 * FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE use. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9234
9235
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - result in ST(i), then pop.  (Doc comment previously
 * said 11/0, a copy-paste slip from faddp.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST(0) with ST(1) and pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9290
9291
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9327
9328
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* ST(0) := ST(0) + m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9335
9336
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* ST(0) := ST(0) * m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9343
9344
/** Opcode 0xde !11/2.
 * Compares ST(0) with a 16-bit integer in memory, updating only FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* The comparison only produces condition codes in FSW; memory operand
           info is recorded for FDP/FDS (exception pointers). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: underflow, no destination register (UINT8_MAX). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9377
9378
/** Opcode 0xde !11/3.
 * Same as FICOM m16i (see iemOp_ficom_m16i) but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* _THEN_POP variants: update FSW / record underflow, then pop the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9411
9412
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* ST(0) := ST(0) - m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9419
9420
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* ST(0) := m16int - ST(0) (reversed subtract). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9427
9428
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST(0) := ST(0) / m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9435
9436
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* ST(0) := m16int / ST(0) (reversed divide). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9443
9444
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms (mod=3) are the popping
 * arithmetic ops on ST(i), memory forms operate on a 16-bit integer.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP register): low 11 bits of the instruction. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* Only the single encoding 0xde 0xd9 is FCOMPP; the rest of /3 is invalid. */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9485
9486
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Mark ST(i) empty, then increment TOP (the "pop" part of FFREEP). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9508
9509
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX.  The no-wait form does not
 * check for pending FPU exceptions first (no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9526
9527
/** Opcode 0xdf 11/5.
 * NOTE(review): this passes the iemAImpl_fcomi_r80_by_r80 worker, same as
 * FCOMIP below.  FUCOMIP differs from FCOMIP only in QNaN #IA behaviour -
 * confirm the shared worker/iemCImpl_fcomi_fucomi handles the unordered
 * variant, or whether a dedicated fucomi worker should be used here. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9534
9535
/** Opcode 0xdf 11/6.
 * Compare ST(0) with ST(i) setting EFLAGS, then pop (fPop=true). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9542
9543
/** Opcode 0xdf !11/0.
 * Load a 16-bit signed integer from memory and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new TOP after a push; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9575
9576
/** Opcode 0xdf !11/1.
 * Store ST(0) as a 16-bit integer with truncation (SSE3), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on FSW (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9611
9612
/** Opcode 0xdf !11/2.
 * Store ST(0) as a 16-bit integer using the rounding mode in FCW (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No _THEN_POP here - FIST leaves the stack unchanged. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9647
9648
/** Opcode 0xdf !11/3.
 * Store ST(0) as a 16-bit integer (rounding per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Conditional commit (suppressed on unmasked exceptions), then pop. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9683
9684
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD.  Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9687
9688
/** Opcode 0xdf !11/5.
 * Load a 64-bit signed integer from memory and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new TOP after the push; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9720
9721
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store packed BCD and pop.  Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9724
9725
/** Opcode 0xdf !11/7.
 * Store ST(0) as a 64-bit integer (rounding per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Conditional commit (suppressed on unmasked exceptions), then pop. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0) with #IA masked: store integer indefinite, then underflow+pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9760
9761
9762/**
9763 * @opcode 0xdf
9764 */
9765FNIEMOP_DEF(iemOp_EscF7)
9766{
9767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9769 {
9770 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9771 {
9772 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9773 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9774 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9775 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9776 case 4: if (bRm == 0xe0)
9777 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9778 return IEMOP_RAISE_INVALID_OPCODE();
9779 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9780 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9781 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9783 }
9784 }
9785 else
9786 {
9787 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9788 {
9789 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9790 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9791 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9792 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9793 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9794 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9795 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9796 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9798 }
9799 }
9800}
9801
9802
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ: decrement the count register (CX/ECX/RCX per effective
 * address size) and branch if it is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which count register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9851
9852
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ: decrement the count register (CX/ECX/RCX per effective
 * address size) and branch if it is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which count register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9901
9902
/**
 * @opcode 0xe2
 *
 * LOOP: decrement the count register (CX/ECX/RCX per effective address
 * size) and branch if it is non-zero.  A LOOP that jumps to itself
 * (disp == -instruction length) would just spin until the counter hits
 * zero, so that case is short-circuited by zeroing the counter directly.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Not a self-referencing jump: emulate normally. */
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            /* "loop $": final state is CX=0, RIP past the instruction. */
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9978
9979
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ: branch if the count register (selected by effective
 * address size) is zero.  Note the inverted branch structure: the taken
 * path is in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10025
10026
/** Opcode 0xe4
 * IN AL,imm8 - read one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10035
10036
/** Opcode 0xe5
 * IN eAX,imm8 - read 2 or 4 bytes (per operand size) from the immediate port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10045
10046
/** Opcode 0xe6
 * OUT imm8,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10055
10056
/** Opcode 0xe7
 * OUT imm8,eAX - write 2 or 4 bytes (per operand size) to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10065
10066
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call.  In 64-bit mode the operand size
 * defaults to 64 bits and the displacement is imm32 sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10097
10098
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 64-bit case shares the 32-bit
 * path because the displacement stays imm32 (sign-extended by REL_JMP_S32).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10130
10131
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32 - direct far jump.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    /* The selector follows the offset in the instruction stream. */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10150
10151
/**
 * @opcode 0xeb
 *
 * JMP rel8 - short relative jump.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10167
10168
/** Opcode 0xec
 * IN AL,DX - read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10176
10177
/** Opcode 0xed
 * IN eAX,DX - read 2 or 4 bytes (per operand size) from the port in DX.
 * NOTE(review): function name is missing the 'in_' part (iemOp_in_eAX_DX
 * would match the convention); not renamed here since the one-byte opcode
 * table elsewhere in this file references it. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10185
10186
/** Opcode 0xee
 * OUT DX,AL - write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10194
10195
/** Opcode 0xef
 * OUT DX,eAX - write 2 or 4 bytes (per operand size) to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10203
10204
/**
 * @opcode 0xf0
 *
 * LOCK prefix: record the prefix and recurse into the one-byte opcode map
 * for the instruction that follows.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10216
10217
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a #DB; fIsBpInstr=false so it is not treated as INT3.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
10228
10229
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: record it and recurse into the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10247
10248
/**
 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix: record it and recurse into the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10266
10267
/**
 * @opcode 0xf4
 *
 * HLT - halt the CPU; privilege checks are done in the C implementation.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10276
10277
10278/**
10279 * @opcode 0xf5
10280 */
10281FNIEMOP_DEF(iemOp_cmc)
10282{
10283 IEMOP_MNEMONIC(cmc, "cmc");
10284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10285 IEM_MC_BEGIN(0, 0);
10286 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10287 IEM_MC_ADVANCE_RIP();
10288 IEM_MC_END();
10289 return VINF_SUCCESS;
10290}
10291
10292
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the ModR/M mod field: register operands go through the
 * normal worker directly, memory operands are mapped read-write and use
 * the locked worker when a LOCK prefix is present.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10336
10337
10338/**
10339 * Common implementation of 'inc/dec/not/neg Ev'.
10340 *
10341 * @param bRm The RM byte.
10342 * @param pImpl The instruction implementation.
10343 */
10344FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10345{
10346 /* Registers are handled by a common worker. */
10347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10348 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10349
10350 /* Memory we do here. */
10351 switch (pVCpu->iem.s.enmEffOpSize)
10352 {
10353 case IEMMODE_16BIT:
10354 IEM_MC_BEGIN(2, 2);
10355 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10358
10359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10360 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10361 IEM_MC_FETCH_EFLAGS(EFlags);
10362 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10363 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10364 else
10365 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10366
10367 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10368 IEM_MC_COMMIT_EFLAGS(EFlags);
10369 IEM_MC_ADVANCE_RIP();
10370 IEM_MC_END();
10371 return VINF_SUCCESS;
10372
10373 case IEMMODE_32BIT:
10374 IEM_MC_BEGIN(2, 2);
10375 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10376 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10378
10379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10380 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10381 IEM_MC_FETCH_EFLAGS(EFlags);
10382 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10383 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10384 else
10385 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10386
10387 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10388 IEM_MC_COMMIT_EFLAGS(EFlags);
10389 IEM_MC_ADVANCE_RIP();
10390 IEM_MC_END();
10391 return VINF_SUCCESS;
10392
10393 case IEMMODE_64BIT:
10394 IEM_MC_BEGIN(2, 2);
10395 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10398
10399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10400 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10401 IEM_MC_FETCH_EFLAGS(EFlags);
10402 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10403 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10404 else
10405 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10406
10407 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10408 IEM_MC_COMMIT_EFLAGS(EFlags);
10409 IEM_MC_ADVANCE_RIP();
10410 IEM_MC_END();
10411 return VINF_SUCCESS;
10412
10413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10414 }
10415}
10416
10417
/** Opcode 0xf6 /0 - test Eb,Ib.
 *
 * TEST only updates EFLAGS; the destination operand is read but never written
 * back, hence the read-only memory mapping below.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* One immediate byte follows the ModR/M operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10464
10465
/** Opcode 0xf7 /0 - test Ev,Iv.
 *
 * TEST only updates EFLAGS; the destination is read but never written back
 * (hence the read-only mappings and no high-dword clearing for the 32-bit
 * register form).  The 64-bit immediate is a sign-extended 32-bit value, as
 * per the instruction encoding.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Two immediate bytes follow the ModR/M operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow the ModR/M operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes (sign-extended to 64-bit) follow the ModR/M operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10605
10606
/** Opcode 0xf6 /4, /5, /6 and /7 - mul/imul/div/idiv Eb.
 *
 * Common worker for the byte-sized multiply/divide group.  The worker routine
 * @a pfnU8 operates on AX in place (multiply widens AL into AX, divide takes
 * AX as dividend) and returns non-zero on \#DE conditions (divide by zero or
 * quotient overflow), in which case a divide error is raised instead of
 * advancing RIP.
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The byte-sized assembly worker (mul/imul/div/idiv u8).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10658
10659
10660/** Opcode 0xf7 /4, /5, /6 and /7. */
10661FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10662{
10663 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10664
10665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10666 {
10667 /* register access */
10668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10669 switch (pVCpu->iem.s.enmEffOpSize)
10670 {
10671 case IEMMODE_16BIT:
10672 {
10673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10674 IEM_MC_BEGIN(4, 1);
10675 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10676 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10677 IEM_MC_ARG(uint16_t, u16Value, 2);
10678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10679 IEM_MC_LOCAL(int32_t, rc);
10680
10681 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10682 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10683 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10684 IEM_MC_REF_EFLAGS(pEFlags);
10685 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10686 IEM_MC_IF_LOCAL_IS_Z(rc) {
10687 IEM_MC_ADVANCE_RIP();
10688 } IEM_MC_ELSE() {
10689 IEM_MC_RAISE_DIVIDE_ERROR();
10690 } IEM_MC_ENDIF();
10691
10692 IEM_MC_END();
10693 return VINF_SUCCESS;
10694 }
10695
10696 case IEMMODE_32BIT:
10697 {
10698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10699 IEM_MC_BEGIN(4, 1);
10700 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10701 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10702 IEM_MC_ARG(uint32_t, u32Value, 2);
10703 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10704 IEM_MC_LOCAL(int32_t, rc);
10705
10706 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10707 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10708 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10709 IEM_MC_REF_EFLAGS(pEFlags);
10710 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10711 IEM_MC_IF_LOCAL_IS_Z(rc) {
10712 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10713 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10714 IEM_MC_ADVANCE_RIP();
10715 } IEM_MC_ELSE() {
10716 IEM_MC_RAISE_DIVIDE_ERROR();
10717 } IEM_MC_ENDIF();
10718
10719 IEM_MC_END();
10720 return VINF_SUCCESS;
10721 }
10722
10723 case IEMMODE_64BIT:
10724 {
10725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10726 IEM_MC_BEGIN(4, 1);
10727 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10728 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10729 IEM_MC_ARG(uint64_t, u64Value, 2);
10730 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10731 IEM_MC_LOCAL(int32_t, rc);
10732
10733 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10734 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10735 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10736 IEM_MC_REF_EFLAGS(pEFlags);
10737 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10738 IEM_MC_IF_LOCAL_IS_Z(rc) {
10739 IEM_MC_ADVANCE_RIP();
10740 } IEM_MC_ELSE() {
10741 IEM_MC_RAISE_DIVIDE_ERROR();
10742 } IEM_MC_ENDIF();
10743
10744 IEM_MC_END();
10745 return VINF_SUCCESS;
10746 }
10747
10748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10749 }
10750 }
10751 else
10752 {
10753 /* memory access. */
10754 switch (pVCpu->iem.s.enmEffOpSize)
10755 {
10756 case IEMMODE_16BIT:
10757 {
10758 IEM_MC_BEGIN(4, 2);
10759 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10760 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10761 IEM_MC_ARG(uint16_t, u16Value, 2);
10762 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10764 IEM_MC_LOCAL(int32_t, rc);
10765
10766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10768 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10769 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10770 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10771 IEM_MC_REF_EFLAGS(pEFlags);
10772 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10773 IEM_MC_IF_LOCAL_IS_Z(rc) {
10774 IEM_MC_ADVANCE_RIP();
10775 } IEM_MC_ELSE() {
10776 IEM_MC_RAISE_DIVIDE_ERROR();
10777 } IEM_MC_ENDIF();
10778
10779 IEM_MC_END();
10780 return VINF_SUCCESS;
10781 }
10782
10783 case IEMMODE_32BIT:
10784 {
10785 IEM_MC_BEGIN(4, 2);
10786 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10787 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10788 IEM_MC_ARG(uint32_t, u32Value, 2);
10789 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10791 IEM_MC_LOCAL(int32_t, rc);
10792
10793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10795 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10796 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10797 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10798 IEM_MC_REF_EFLAGS(pEFlags);
10799 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10800 IEM_MC_IF_LOCAL_IS_Z(rc) {
10801 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10802 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10803 IEM_MC_ADVANCE_RIP();
10804 } IEM_MC_ELSE() {
10805 IEM_MC_RAISE_DIVIDE_ERROR();
10806 } IEM_MC_ENDIF();
10807
10808 IEM_MC_END();
10809 return VINF_SUCCESS;
10810 }
10811
10812 case IEMMODE_64BIT:
10813 {
10814 IEM_MC_BEGIN(4, 2);
10815 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10816 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10817 IEM_MC_ARG(uint64_t, u64Value, 2);
10818 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10820 IEM_MC_LOCAL(int32_t, rc);
10821
10822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10824 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10825 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10826 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10827 IEM_MC_REF_EFLAGS(pEFlags);
10828 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10829 IEM_MC_IF_LOCAL_IS_Z(rc) {
10830 IEM_MC_ADVANCE_RIP();
10831 } IEM_MC_ELSE() {
10832 IEM_MC_RAISE_DIVIDE_ERROR();
10833 } IEM_MC_ENDIF();
10834
10835 IEM_MC_END();
10836 return VINF_SUCCESS;
10837 }
10838
10839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10840 }
10841 }
10842}
10843
10844/**
10845 * @opcode 0xf6
10846 */
10847FNIEMOP_DEF(iemOp_Grp3_Eb)
10848{
10849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10850 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10851 {
10852 case 0:
10853 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
10854 case 1:
10855/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10856 return IEMOP_RAISE_INVALID_OPCODE();
10857 case 2:
10858 IEMOP_MNEMONIC(not_Eb, "not Eb");
10859 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
10860 case 3:
10861 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
10862 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
10863 case 4:
10864 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
10865 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10866 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
10867 case 5:
10868 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
10869 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10870 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
10871 case 6:
10872 IEMOP_MNEMONIC(div_Eb, "div Eb");
10873 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10874 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
10875 case 7:
10876 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
10877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10878 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
10879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10880 }
10881}
10882
10883
10884/**
10885 * @opcode 0xf7
10886 */
10887FNIEMOP_DEF(iemOp_Grp3_Ev)
10888{
10889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10890 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10891 {
10892 case 0:
10893 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
10894 case 1:
10895/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10896 return IEMOP_RAISE_INVALID_OPCODE();
10897 case 2:
10898 IEMOP_MNEMONIC(not_Ev, "not Ev");
10899 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
10900 case 3:
10901 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
10902 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
10903 case 4:
10904 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
10905 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10906 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
10907 case 5:
10908 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
10909 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10910 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
10911 case 6:
10912 IEMOP_MNEMONIC(div_Ev, "div Ev");
10913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10914 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
10915 case 7:
10916 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
10917 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10918 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
10919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10920 }
10921}
10922
10923
10924/**
10925 * @opcode 0xf8
10926 */
10927FNIEMOP_DEF(iemOp_clc)
10928{
10929 IEMOP_MNEMONIC(clc, "clc");
10930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10931 IEM_MC_BEGIN(0, 0);
10932 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
10933 IEM_MC_ADVANCE_RIP();
10934 IEM_MC_END();
10935 return VINF_SUCCESS;
10936}
10937
10938
10939/**
10940 * @opcode 0xf9
10941 */
10942FNIEMOP_DEF(iemOp_stc)
10943{
10944 IEMOP_MNEMONIC(stc, "stc");
10945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10946 IEM_MC_BEGIN(0, 0);
10947 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
10948 IEM_MC_ADVANCE_RIP();
10949 IEM_MC_END();
10950 return VINF_SUCCESS;
10951}
10952
10953
10954/**
10955 * @opcode 0xfa
10956 */
10957FNIEMOP_DEF(iemOp_cli)
10958{
10959 IEMOP_MNEMONIC(cli, "cli");
10960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10961 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
10962}
10963
10964
/**
 * @opcode 0xfb
 *
 * STI - deferred to a C implementation since setting IF involves IOPL/VME
 * privilege checks and interrupt shadowing.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10971
10972
10973/**
10974 * @opcode 0xfc
10975 */
10976FNIEMOP_DEF(iemOp_cld)
10977{
10978 IEMOP_MNEMONIC(cld, "cld");
10979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10980 IEM_MC_BEGIN(0, 0);
10981 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
10982 IEM_MC_ADVANCE_RIP();
10983 IEM_MC_END();
10984 return VINF_SUCCESS;
10985}
10986
10987
10988/**
10989 * @opcode 0xfd
10990 */
10991FNIEMOP_DEF(iemOp_std)
10992{
10993 IEMOP_MNEMONIC(std, "std");
10994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10995 IEM_MC_BEGIN(0, 0);
10996 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
10997 IEM_MC_ADVANCE_RIP();
10998 IEM_MC_END();
10999 return VINF_SUCCESS;
11000}
11001
11002
11003/**
11004 * @opcode 0xfe
11005 */
11006FNIEMOP_DEF(iemOp_Grp4)
11007{
11008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11009 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11010 {
11011 case 0:
11012 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11013 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11014 case 1:
11015 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11016 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11017 default:
11018 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11019 return IEMOP_RAISE_INVALID_OPCODE();
11020 }
11021}
11022
11023
11024/**
11025 * Opcode 0xff /2.
11026 * @param bRm The RM byte.
11027 */
11028FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11029{
11030 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11031 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11032
11033 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11034 {
11035 /* The new RIP is taken from a register. */
11036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11037 switch (pVCpu->iem.s.enmEffOpSize)
11038 {
11039 case IEMMODE_16BIT:
11040 IEM_MC_BEGIN(1, 0);
11041 IEM_MC_ARG(uint16_t, u16Target, 0);
11042 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11043 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11044 IEM_MC_END()
11045 return VINF_SUCCESS;
11046
11047 case IEMMODE_32BIT:
11048 IEM_MC_BEGIN(1, 0);
11049 IEM_MC_ARG(uint32_t, u32Target, 0);
11050 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11051 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11052 IEM_MC_END()
11053 return VINF_SUCCESS;
11054
11055 case IEMMODE_64BIT:
11056 IEM_MC_BEGIN(1, 0);
11057 IEM_MC_ARG(uint64_t, u64Target, 0);
11058 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11059 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11060 IEM_MC_END()
11061 return VINF_SUCCESS;
11062
11063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11064 }
11065 }
11066 else
11067 {
11068 /* The new RIP is taken from a register. */
11069 switch (pVCpu->iem.s.enmEffOpSize)
11070 {
11071 case IEMMODE_16BIT:
11072 IEM_MC_BEGIN(1, 1);
11073 IEM_MC_ARG(uint16_t, u16Target, 0);
11074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11077 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11078 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11079 IEM_MC_END()
11080 return VINF_SUCCESS;
11081
11082 case IEMMODE_32BIT:
11083 IEM_MC_BEGIN(1, 1);
11084 IEM_MC_ARG(uint32_t, u32Target, 0);
11085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11088 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11089 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11090 IEM_MC_END()
11091 return VINF_SUCCESS;
11092
11093 case IEMMODE_64BIT:
11094 IEM_MC_BEGIN(1, 1);
11095 IEM_MC_ARG(uint64_t, u64Target, 0);
11096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11099 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11100 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11101 IEM_MC_END()
11102 return VINF_SUCCESS;
11103
11104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11105 }
11106 }
11107}
11108
11109typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11110
11111FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11112{
11113 /* Registers? How?? */
11114 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11115 { /* likely */ }
11116 else
11117 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11118
11119 /* Far pointer loaded from memory. */
11120 switch (pVCpu->iem.s.enmEffOpSize)
11121 {
11122 case IEMMODE_16BIT:
11123 IEM_MC_BEGIN(3, 1);
11124 IEM_MC_ARG(uint16_t, u16Sel, 0);
11125 IEM_MC_ARG(uint16_t, offSeg, 1);
11126 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11130 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11131 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11132 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11133 IEM_MC_END();
11134 return VINF_SUCCESS;
11135
11136 case IEMMODE_64BIT:
11137 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11138 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11139 * and call far qword [rsp] encodings. */
11140 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11141 {
11142 IEM_MC_BEGIN(3, 1);
11143 IEM_MC_ARG(uint16_t, u16Sel, 0);
11144 IEM_MC_ARG(uint64_t, offSeg, 1);
11145 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11149 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11150 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11151 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11152 IEM_MC_END();
11153 return VINF_SUCCESS;
11154 }
11155 /* AMD falls thru. */
11156 /* fall thru */
11157
11158 case IEMMODE_32BIT:
11159 IEM_MC_BEGIN(3, 1);
11160 IEM_MC_ARG(uint16_t, u16Sel, 0);
11161 IEM_MC_ARG(uint32_t, offSeg, 1);
11162 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11166 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11167 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11168 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11169 IEM_MC_END();
11170 return VINF_SUCCESS;
11171
11172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11173 }
11174}
11175
11176
11177/**
11178 * Opcode 0xff /3.
11179 * @param bRm The RM byte.
11180 */
11181FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11182{
11183 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11184 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11185}
11186
11187
11188/**
11189 * Opcode 0xff /4.
11190 * @param bRm The RM byte.
11191 */
11192FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11193{
11194 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11195 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11196
11197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11198 {
11199 /* The new RIP is taken from a register. */
11200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11201 switch (pVCpu->iem.s.enmEffOpSize)
11202 {
11203 case IEMMODE_16BIT:
11204 IEM_MC_BEGIN(0, 1);
11205 IEM_MC_LOCAL(uint16_t, u16Target);
11206 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11207 IEM_MC_SET_RIP_U16(u16Target);
11208 IEM_MC_END()
11209 return VINF_SUCCESS;
11210
11211 case IEMMODE_32BIT:
11212 IEM_MC_BEGIN(0, 1);
11213 IEM_MC_LOCAL(uint32_t, u32Target);
11214 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11215 IEM_MC_SET_RIP_U32(u32Target);
11216 IEM_MC_END()
11217 return VINF_SUCCESS;
11218
11219 case IEMMODE_64BIT:
11220 IEM_MC_BEGIN(0, 1);
11221 IEM_MC_LOCAL(uint64_t, u64Target);
11222 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11223 IEM_MC_SET_RIP_U64(u64Target);
11224 IEM_MC_END()
11225 return VINF_SUCCESS;
11226
11227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11228 }
11229 }
11230 else
11231 {
11232 /* The new RIP is taken from a memory location. */
11233 switch (pVCpu->iem.s.enmEffOpSize)
11234 {
11235 case IEMMODE_16BIT:
11236 IEM_MC_BEGIN(0, 2);
11237 IEM_MC_LOCAL(uint16_t, u16Target);
11238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11241 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11242 IEM_MC_SET_RIP_U16(u16Target);
11243 IEM_MC_END()
11244 return VINF_SUCCESS;
11245
11246 case IEMMODE_32BIT:
11247 IEM_MC_BEGIN(0, 2);
11248 IEM_MC_LOCAL(uint32_t, u32Target);
11249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11252 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11253 IEM_MC_SET_RIP_U32(u32Target);
11254 IEM_MC_END()
11255 return VINF_SUCCESS;
11256
11257 case IEMMODE_64BIT:
11258 IEM_MC_BEGIN(0, 2);
11259 IEM_MC_LOCAL(uint64_t, u64Target);
11260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11263 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11264 IEM_MC_SET_RIP_U64(u64Target);
11265 IEM_MC_END()
11266 return VINF_SUCCESS;
11267
11268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11269 }
11270 }
11271}
11272
11273
11274/**
11275 * Opcode 0xff /5.
11276 * @param bRm The RM byte.
11277 */
11278FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11279{
11280 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11281 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11282}
11283
11284
11285/**
11286 * Opcode 0xff /6.
11287 * @param bRm The RM byte.
11288 */
11289FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11290{
11291 IEMOP_MNEMONIC(push_Ev, "push Ev");
11292
11293 /* Registers are handled by a common worker. */
11294 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11295 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11296
11297 /* Memory we do here. */
11298 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11299 switch (pVCpu->iem.s.enmEffOpSize)
11300 {
11301 case IEMMODE_16BIT:
11302 IEM_MC_BEGIN(0, 2);
11303 IEM_MC_LOCAL(uint16_t, u16Src);
11304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11307 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11308 IEM_MC_PUSH_U16(u16Src);
11309 IEM_MC_ADVANCE_RIP();
11310 IEM_MC_END();
11311 return VINF_SUCCESS;
11312
11313 case IEMMODE_32BIT:
11314 IEM_MC_BEGIN(0, 2);
11315 IEM_MC_LOCAL(uint32_t, u32Src);
11316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11319 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11320 IEM_MC_PUSH_U32(u32Src);
11321 IEM_MC_ADVANCE_RIP();
11322 IEM_MC_END();
11323 return VINF_SUCCESS;
11324
11325 case IEMMODE_64BIT:
11326 IEM_MC_BEGIN(0, 2);
11327 IEM_MC_LOCAL(uint64_t, u64Src);
11328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11331 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11332 IEM_MC_PUSH_U64(u64Src);
11333 IEM_MC_ADVANCE_RIP();
11334 IEM_MC_END();
11335 return VINF_SUCCESS;
11336
11337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11338 }
11339}
11340
11341
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher.  Reads the ModR/M byte and dispatches on its reg
 * field (/0 thru /7): inc, dec, near call, far call, near jmp, far jmp,
 * push, with /7 being an invalid encoding that raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* /0: inc Ev - handled by the common unary Ev worker. */
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1: /* /1: dec Ev - handled by the common unary Ev worker. */
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2: /* /2: call Ev (near indirect call). */
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3: /* /3: call Ep (far indirect call). */
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4: /* /4: jmp Ev (near indirect jump). */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5: /* /5: jmp Ep (far indirect jump). */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6: /* /6: push Ev. */
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7: /* /7: invalid encoding -> #UD. */
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the reg field is masked to 3 bits, all values handled above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11372
11373
11374
/**
 * The one byte opcode dispatch table, indexed directly by the opcode byte
 * (0x00..0xff).  Four entries per line; the comment on each line gives the
 * opcode value of the line's first entry.  Declared extern at the top of
 * the file since it is forward referenced by decoder helpers.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11442
11443
11444/** @} */
11445
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette