VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65790

Last change on this file since 65790 was 65790, checked in by vboxsync, 8 years ago

IEM: updates.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 366.2 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65790 2017-02-14 16:59:50Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
/** @defgroup og_gen General
25 * @{
26 */
27
/** @defgroup og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/*
42 * Instruction specification format - work in progress:
43 *
44 * @opmnemonic add
45 * @op1 reg:Eb
46 * @op2 rm:Gb
47 * @oppfx none
48 * @opmaps none
49 * @opcode 0x00
50 * @openc ModR/M
51 * @opfltest none
52 * @opflmodify of,sz,zf,af,pf,cf
53 * @opflundef none
54 * @opflset none
55 * @opflclear none
56 * @ophints harmless
57 * @opstats add_Eb_Gb
58 * @optest in1=1 in2=1 -> out1=2 outfl=a?,p?
59 * @optest oppfx:o32 in1=0xfffffffe:dw in2=1:dw -> out1=0xffffffff:dw outfl=a?,p?
60 *
 * @ingroup og_gen_arith_bin
62 */
63FNIEMOP_DEF(iemOp_add_Eb_Gb)
64{
65 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
66 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
67}
68
69
70/** Opcode 0x01. */
71FNIEMOP_DEF(iemOp_add_Ev_Gv)
72{
73 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
74 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
75}
76
77
78/** Opcode 0x02. */
79FNIEMOP_DEF(iemOp_add_Gb_Eb)
80{
81 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
82 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
83}
84
85
86/** Opcode 0x03. */
87FNIEMOP_DEF(iemOp_add_Gv_Ev)
88{
89 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
90 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
91}
92
93
94/** Opcode 0x04. */
95FNIEMOP_DEF(iemOp_add_Al_Ib)
96{
97 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
98 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
99}
100
101
102/** Opcode 0x05. */
103FNIEMOP_DEF(iemOp_add_eAX_Iz)
104{
105 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
106 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
107}
108
109
/** Opcode 0x06 - push es (invalid in 64-bit mode, see iemOpCommonPushSReg). */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
116
117
118/** Opcode 0x07. */
119FNIEMOP_DEF(iemOp_pop_ES)
120{
121 IEMOP_MNEMONIC(pop_es, "pop es");
122 IEMOP_HLP_NO_64BIT();
123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
124 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
125}
126
127
/** Opcode 0x08 - or Eb,Gb (AF is left undefined by the hardware, hence the
 *  verification-mode marker below). */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
180
181
/** Opcode 0x0e - push cs (8086/80186 only; 0x0f became the two-byte escape
 *  on the 286 and later). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
188
189
/** Opcode 0x0f - escape byte into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time sanity check (strict builds only): the two-byte map stores four
       entries per opcode byte, indexed by the mandatory-prefix index used in
       the dispatch below; opcode 0x0fbc (bsf/tzcnt) is used as a spot check
       that the table layout matches that assumption. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch on the second opcode byte, 4 table entries per byte. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
212
/** Opcode 0x10 - adc Eb,Gb: byte add-with-carry, memory/register destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc al,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
259
260
/** Opcode 0x16 - push ss (invalid in 64-bit mode, see iemOpCommonPushSReg). */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
267
268
/** Opcode 0x17 - pop ss (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
277
278
/** Opcode 0x18 - sbb Eb,Gb: byte subtract-with-borrow, memory/register destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb al,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
325
326
/** Opcode 0x1e - push ds (invalid in 64-bit mode, see iemOpCommonPushSReg). */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
333
334
/** Opcode 0x1f - pop ds (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
343
344
/** Opcode 0x20 - and Eb,Gb (AF left undefined by hardware, see the
 *  verification-mode marker below). */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and al,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
397
398
/** Opcode 0x26 - ES segment-override prefix: record the override, then decode
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
409
410
/** Opcode 0x27 - daa (decimal adjust AL after addition; invalid in 64-bit
 *  mode, OF left undefined by hardware). */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
420
421
/** Opcode 0x28 - sub Eb,Gb: byte SUB, memory/register destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub al,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
468
469
/** Opcode 0x2e - CS segment-override prefix: record the override, then decode
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
480
481
/** Opcode 0x2f - das (decimal adjust AL after subtraction; invalid in 64-bit
 *  mode, OF left undefined by hardware). */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
491
492
/** Opcode 0x30 - xor Eb,Gb (AF left undefined by hardware, see the
 *  verification-mode marker below). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor al,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
545
546
/** Opcode 0x36 - SS segment-override prefix: record the override, then decode
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
557
558
/** Opcode 0x37 - aaa (ASCII adjust AL after addition); not implemented yet. */
FNIEMOP_STUB(iemOp_aaa);
561
562
/** Opcode 0x38 - cmp Eb,Gb: byte compare (flags only, no destination write). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp al,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
609
610
/** Opcode 0x3e - DS segment-override prefix: record the override, then decode
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
621
622
/** Opcode 0x3f - aas (ASCII adjust AL after subtraction); not implemented yet. */
FNIEMOP_STUB(iemOp_aas);
625
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Worker function table providing the 16/32/64-bit variants
 *                  of the unary operation.
 * @param   iReg    The general purpose register number (REX extension, where
 *                  applicable, already applied by the caller).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes clear bits 63:32 of the full register. */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* Not reached: all IEMMODE values are handled above. */
}
670
671
/** Opcode 0x40 - REX prefix in 64-bit mode, otherwise 'inc eAX'. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the prefix recorded. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - REX.B prefix in 64-bit mode, otherwise 'inc eCX'. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - REX.X prefix in 64-bit mode, otherwise 'inc eDX'. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - REX.BX prefix in 64-bit mode, otherwise 'inc eBX'. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - REX.R prefix in 64-bit mode, otherwise 'inc eSP'. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - REX.RB prefix in 64-bit mode, otherwise 'inc eBP'. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - REX.RX prefix in 64-bit mode, otherwise 'inc eSI'. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - REX.RBX prefix in 64-bit mode, otherwise 'inc eDI'. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
843
844
/** Opcode 0x48 - REX.W prefix in 64-bit mode, otherwise 'dec eAX'. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - REX.BW prefix in 64-bit mode, otherwise 'dec eCX'. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - REX.XW prefix in 64-bit mode, otherwise 'dec eDX'. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - REX.BXW prefix in 64-bit mode, otherwise 'dec eBX'. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - REX.RW prefix in 64-bit mode, otherwise 'dec eSP'. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - REX.RBW prefix in 64-bit mode, otherwise 'dec eBP'. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - REX.RXW prefix in 64-bit mode, otherwise 'dec eSI'. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - REX.RBXW prefix in 64-bit mode, otherwise 'dec eDI'. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1023
1024
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The register number before any REX.B extension (applied
 *                  here when in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* In 64-bit mode pushes are 64-bit by default and 16-bit with an
           operand-size prefix; there is no 32-bit push. */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1070
1071
/** Opcode 0x50 - push rAX (REX.B selects r8 in 64-bit mode, see helper). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1102
1103
1104/** Opcode 0x54. */
1105FNIEMOP_DEF(iemOp_push_eSP)
1106{
1107 IEMOP_MNEMONIC(push_rSP, "push rSP");
1108 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1109 {
1110 IEM_MC_BEGIN(0, 1);
1111 IEM_MC_LOCAL(uint16_t, u16Value);
1112 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1113 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1114 IEM_MC_PUSH_U16(u16Value);
1115 IEM_MC_ADVANCE_RIP();
1116 IEM_MC_END();
1117 }
1118 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1119}
1120
1121
/** Opcode 0x55 - push rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - push rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - push rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1144
1145
/**
 * Common 'pop register' helper.
 *
 * Note: 'pop rSP' has its own handler (iemOp_pop_eSP) because of its special
 * semantics; this helper is still used for it when REX.B redirects to r12.
 *
 * @param   iReg    The register number before any REX.B extension (applied
 *                  here when in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* 64-bit default, 16-bit with operand-size prefix; no 32-bit pop. */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1192
1193
1194/** Opcode 0x58. */
1195FNIEMOP_DEF(iemOp_pop_eAX)
1196{
1197 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1198 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1199}
1200
1201
/** Opcode 0x59 - pop rCX, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1208
1209
/** Opcode 0x5a - pop rDX, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1216
1217
/** Opcode 0x5b - pop rBX, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1224
1225
/** Opcode 0x5c - pop rSP.
 *
 * Needs special handling because the destination register is also the stack
 * pointer: the value read from the stack must replace SP/ESP/RSP instead of
 * being combined with the implicit stack-pointer increment, so this cannot
 * share the plain iemOpCommonPopGReg code path.  With REX.B the instruction
 * targets r12 instead and the common helper applies. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1273
1274
/** Opcode 0x5d - pop rBP, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1281
1282
/** Opcode 0x5e - pop rSI, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1289
1290
/** Opcode 0x5f - pop rDI, defers to the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1297
1298
/** Opcode 0x60 - pusha/pushad.
 * Requires a 186+ CPU and is invalid in 64-bit mode; the whole multi-push is
 * deferred to a C implementation selected by the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1310
1311
/** Opcode 0x61 - popa/popad outside 64-bit mode; MVEX prefix in 64-bit mode.
 * MVEX (Knights Corner) is not supported, so in 64-bit mode this raises \#UD. */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1329
1330
/** Opcode 0x62 - bound (pre-64-bit) / EVEX prefix; not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186();
1334
1335
/** Opcode 0x63 - non-64-bit modes: arpl Ew,Gw.
 * Adjusts the RPL field (bits 1:0) of the destination selector up to the
 * source's RPL; the assembly worker sets ZF accordingly.  Requires 286+ and
 * protected mode (invalid in real/V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - mapped read/write and committed after the worker. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1385
1386
/** Opcode 0x63 - 64-bit mode: movsxd Gv,Ev.
 * Sign-extends a 32-bit register/memory operand into a 64-bit register.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1428
1429
/** Opcode 0x64 - FS segment-override prefix.
 * Records the prefix and effective segment, then decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1442
1443
/** Opcode 0x65 - GS segment-override prefix.
 * Records the prefix and effective segment, then decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1456
1457
/** Opcode 0x66 - operand-size override prefix.
 * Flips the effective operand size and continues decoding the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1475
1476
/** Opcode 0x67 - address-size override prefix.
 * Toggles the effective address mode relative to the default (16 <-> 32;
 * 64-bit default drops to 32-bit) and continues decoding the next byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1495
1496
/** Opcode 0x68 - push Iz.
 * Pushes a word/dword immediate; in 64-bit mode a sign-extended dword
 * immediate is pushed as a qword (default 64-bit operand size). 186+. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is a dword sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1541
1542
/** Opcode 0x69 - imul Gv,Ev,Iz.
 * Three-operand signed multiply: Gv = Ev * Iz (truncated result; CF/OF signal
 * overflow, SF/ZF/AF/PF are undefined).  186+.
 * Note the decode order for the memory forms: the effective address must be
 * calculated before the immediate is fetched, since the ModR/M displacement
 * precedes the immediate in the instruction stream. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the ModR/M. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; immediate is a dword sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
1702
1703
/** Opcode 0x6a - push Ib.
 * Pushes a sign-extended byte immediate with the current effective operand
 * size (default 64-bit in long mode).  186+. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1730
1731
/** Opcode 0x6b - imul Gv,Ev,Ib.
 * Three-operand signed multiply with a sign-extended byte immediate:
 * Gv = Ev * Ib (CF/OF signal overflow, SF/ZF/AF/PF are undefined).  186+.
 * For the memory forms the effective address is calculated before the
 * immediate byte is fetched, matching the instruction-stream layout. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModR/M. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModR/M. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModR/M. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
1885
1886
/** Opcode 0x6c - ins Yb,DX (byte string input from port DX).
 * Both the REP and the plain form are deferred to C implementations selected
 * by the effective address mode.  The 'false' argument is the fIoChecked
 * flag (I/O permission not yet verified by the caller). */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ behaves like REPZ for string I/O. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1915
1916
/** Opcode 0x6d - ins Yv,DX (word/dword string input from port DX).
 * Dispatches on effective operand size and address mode to the matching C
 * implementation; 64-bit operand size is treated as 32-bit (no 64-bit port
 * I/O).  The 'false' argument is the fIoChecked flag. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1977
1978
/** Opcode 0x6e - outs DX,Yb (byte string output to port DX).
 * Deferred to a C implementation selected by the effective address mode;
 * the effective segment is passed because OUTS honours segment overrides.
 * The 'false' argument is the fIoChecked flag. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ behaves like REPZ for string I/O. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2007
2008
/** Opcode 0x6f - outs DX,Yv (word/dword string output to port DX).
 * Dispatches on effective operand size and address mode; 64-bit operand size
 * is treated as 32-bit (no 64-bit port I/O).  The effective segment is passed
 * because OUTS honours segment overrides; 'false' is the fIoChecked flag. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2069
2070
/** Opcode 0x70 - jo Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2088
2089
/** Opcode 0x71 - jno Jb: jump short if not overflow (OF=0).
 * Same test as jo with the taken/not-taken branches swapped. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2107
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2125
2126
/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if no carry (CF=0).
 * Same test as jc with the taken/not-taken branches swapped. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2144
2145
/** Opcode 0x74 - je/jz Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2163
2164
/** Opcode 0x75 - jne/jnz Jb: jump short if not equal/not zero (ZF=0).
 * Same test as je with the taken/not-taken branches swapped. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2182
2183
/** Opcode 0x76 - jbe/jna Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2201
2202
2203/** Opcode 0x77. */
2204FNIEMOP_DEF(iemOp_jnbe_Jb)
2205{
2206 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2207 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2210
2211 IEM_MC_BEGIN(0, 0);
2212 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2213 IEM_MC_ADVANCE_RIP();
2214 } IEM_MC_ELSE() {
2215 IEM_MC_REL_JMP_S8(i8Imm);
2216 } IEM_MC_ENDIF();
2217 IEM_MC_END();
2218 return VINF_SUCCESS;
2219}
2220
2221
2222/** Opcode 0x78. */
2223FNIEMOP_DEF(iemOp_js_Jb)
2224{
2225 IEMOP_MNEMONIC(js_Jb, "js Jb");
2226 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2228 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2229
2230 IEM_MC_BEGIN(0, 0);
2231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2232 IEM_MC_REL_JMP_S8(i8Imm);
2233 } IEM_MC_ELSE() {
2234 IEM_MC_ADVANCE_RIP();
2235 } IEM_MC_ENDIF();
2236 IEM_MC_END();
2237 return VINF_SUCCESS;
2238}
2239
2240
2241/** Opcode 0x79. */
2242FNIEMOP_DEF(iemOp_jns_Jb)
2243{
2244 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2245 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2248
2249 IEM_MC_BEGIN(0, 0);
2250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2251 IEM_MC_ADVANCE_RIP();
2252 } IEM_MC_ELSE() {
2253 IEM_MC_REL_JMP_S8(i8Imm);
2254 } IEM_MC_ENDIF();
2255 IEM_MC_END();
2256 return VINF_SUCCESS;
2257}
2258
2259
2260/** Opcode 0x7a. */
2261FNIEMOP_DEF(iemOp_jp_Jb)
2262{
2263 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2264 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2266 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2267
2268 IEM_MC_BEGIN(0, 0);
2269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2270 IEM_MC_REL_JMP_S8(i8Imm);
2271 } IEM_MC_ELSE() {
2272 IEM_MC_ADVANCE_RIP();
2273 } IEM_MC_ENDIF();
2274 IEM_MC_END();
2275 return VINF_SUCCESS;
2276}
2277
2278
2279/** Opcode 0x7b. */
2280FNIEMOP_DEF(iemOp_jnp_Jb)
2281{
2282 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
2283 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2285 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2286
2287 IEM_MC_BEGIN(0, 0);
2288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2289 IEM_MC_ADVANCE_RIP();
2290 } IEM_MC_ELSE() {
2291 IEM_MC_REL_JMP_S8(i8Imm);
2292 } IEM_MC_ENDIF();
2293 IEM_MC_END();
2294 return VINF_SUCCESS;
2295}
2296
2297
2298/** Opcode 0x7c. */
2299FNIEMOP_DEF(iemOp_jl_Jb)
2300{
2301 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
2302 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2304 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2305
2306 IEM_MC_BEGIN(0, 0);
2307 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2308 IEM_MC_REL_JMP_S8(i8Imm);
2309 } IEM_MC_ELSE() {
2310 IEM_MC_ADVANCE_RIP();
2311 } IEM_MC_ENDIF();
2312 IEM_MC_END();
2313 return VINF_SUCCESS;
2314}
2315
2316
2317/** Opcode 0x7d. */
2318FNIEMOP_DEF(iemOp_jnl_Jb)
2319{
2320 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
2321 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2323 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2324
2325 IEM_MC_BEGIN(0, 0);
2326 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2327 IEM_MC_ADVANCE_RIP();
2328 } IEM_MC_ELSE() {
2329 IEM_MC_REL_JMP_S8(i8Imm);
2330 } IEM_MC_ENDIF();
2331 IEM_MC_END();
2332 return VINF_SUCCESS;
2333}
2334
2335
2336/** Opcode 0x7e. */
2337FNIEMOP_DEF(iemOp_jle_Jb)
2338{
2339 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
2340 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2342 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2343
2344 IEM_MC_BEGIN(0, 0);
2345 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2346 IEM_MC_REL_JMP_S8(i8Imm);
2347 } IEM_MC_ELSE() {
2348 IEM_MC_ADVANCE_RIP();
2349 } IEM_MC_ENDIF();
2350 IEM_MC_END();
2351 return VINF_SUCCESS;
2352}
2353
2354
2355/** Opcode 0x7f. */
2356FNIEMOP_DEF(iemOp_jnle_Jb)
2357{
2358 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
2359 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2362
2363 IEM_MC_BEGIN(0, 0);
2364 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2365 IEM_MC_ADVANCE_RIP();
2366 } IEM_MC_ELSE() {
2367 IEM_MC_REL_JMP_S8(i8Imm);
2368 } IEM_MC_ENDIF();
2369 IEM_MC_END();
2370 return VINF_SUCCESS;
2371}
2372
2373
2374/** Opcode 0x80. */
2375FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
2376{
2377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2378 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2379 {
2380 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
2381 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
2382 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
2383 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
2384 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
2385 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
2386 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
2387 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
2388 }
2389 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2390
2391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2392 {
2393 /* register target */
2394 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEM_MC_BEGIN(3, 0);
2397 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2398 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2399 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2400
2401 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2402 IEM_MC_REF_EFLAGS(pEFlags);
2403 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2404
2405 IEM_MC_ADVANCE_RIP();
2406 IEM_MC_END();
2407 }
2408 else
2409 {
2410 /* memory target */
2411 uint32_t fAccess;
2412 if (pImpl->pfnLockedU8)
2413 fAccess = IEM_ACCESS_DATA_RW;
2414 else /* CMP */
2415 fAccess = IEM_ACCESS_DATA_R;
2416 IEM_MC_BEGIN(3, 2);
2417 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2418 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2420
2421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2422 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2423 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2424 if (pImpl->pfnLockedU8)
2425 IEMOP_HLP_DONE_DECODING();
2426 else
2427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2428
2429 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2430 IEM_MC_FETCH_EFLAGS(EFlags);
2431 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2432 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2433 else
2434 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
2435
2436 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
2437 IEM_MC_COMMIT_EFLAGS(EFlags);
2438 IEM_MC_ADVANCE_RIP();
2439 IEM_MC_END();
2440 }
2441 return VINF_SUCCESS;
2442}
2443
2444
2445/** Opcode 0x81. */
2446FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2447{
2448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2449 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2450 {
2451 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2452 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2453 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2454 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2455 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2456 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2457 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2458 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2459 }
2460 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2461
2462 switch (pVCpu->iem.s.enmEffOpSize)
2463 {
2464 case IEMMODE_16BIT:
2465 {
2466 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2467 {
2468 /* register target */
2469 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2471 IEM_MC_BEGIN(3, 0);
2472 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2473 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
2474 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2475
2476 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2477 IEM_MC_REF_EFLAGS(pEFlags);
2478 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2479
2480 IEM_MC_ADVANCE_RIP();
2481 IEM_MC_END();
2482 }
2483 else
2484 {
2485 /* memory target */
2486 uint32_t fAccess;
2487 if (pImpl->pfnLockedU16)
2488 fAccess = IEM_ACCESS_DATA_RW;
2489 else /* CMP, TEST */
2490 fAccess = IEM_ACCESS_DATA_R;
2491 IEM_MC_BEGIN(3, 2);
2492 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2493 IEM_MC_ARG(uint16_t, u16Src, 1);
2494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2496
2497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2498 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2499 IEM_MC_ASSIGN(u16Src, u16Imm);
2500 if (pImpl->pfnLockedU16)
2501 IEMOP_HLP_DONE_DECODING();
2502 else
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2505 IEM_MC_FETCH_EFLAGS(EFlags);
2506 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2507 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2508 else
2509 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2510
2511 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
2512 IEM_MC_COMMIT_EFLAGS(EFlags);
2513 IEM_MC_ADVANCE_RIP();
2514 IEM_MC_END();
2515 }
2516 break;
2517 }
2518
2519 case IEMMODE_32BIT:
2520 {
2521 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /* register target */
2524 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2526 IEM_MC_BEGIN(3, 0);
2527 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2528 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
2529 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2530
2531 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2532 IEM_MC_REF_EFLAGS(pEFlags);
2533 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2534 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
2535
2536 IEM_MC_ADVANCE_RIP();
2537 IEM_MC_END();
2538 }
2539 else
2540 {
2541 /* memory target */
2542 uint32_t fAccess;
2543 if (pImpl->pfnLockedU32)
2544 fAccess = IEM_ACCESS_DATA_RW;
2545 else /* CMP, TEST */
2546 fAccess = IEM_ACCESS_DATA_R;
2547 IEM_MC_BEGIN(3, 2);
2548 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2549 IEM_MC_ARG(uint32_t, u32Src, 1);
2550 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2552
2553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2554 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2555 IEM_MC_ASSIGN(u32Src, u32Imm);
2556 if (pImpl->pfnLockedU32)
2557 IEMOP_HLP_DONE_DECODING();
2558 else
2559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2560 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2561 IEM_MC_FETCH_EFLAGS(EFlags);
2562 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2563 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2564 else
2565 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
2566
2567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
2568 IEM_MC_COMMIT_EFLAGS(EFlags);
2569 IEM_MC_ADVANCE_RIP();
2570 IEM_MC_END();
2571 }
2572 break;
2573 }
2574
2575 case IEMMODE_64BIT:
2576 {
2577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2578 {
2579 /* register target */
2580 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2582 IEM_MC_BEGIN(3, 0);
2583 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2584 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
2585 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2586
2587 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2588 IEM_MC_REF_EFLAGS(pEFlags);
2589 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2590
2591 IEM_MC_ADVANCE_RIP();
2592 IEM_MC_END();
2593 }
2594 else
2595 {
2596 /* memory target */
2597 uint32_t fAccess;
2598 if (pImpl->pfnLockedU64)
2599 fAccess = IEM_ACCESS_DATA_RW;
2600 else /* CMP */
2601 fAccess = IEM_ACCESS_DATA_R;
2602 IEM_MC_BEGIN(3, 2);
2603 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2604 IEM_MC_ARG(uint64_t, u64Src, 1);
2605 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2607
2608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2609 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2610 if (pImpl->pfnLockedU64)
2611 IEMOP_HLP_DONE_DECODING();
2612 else
2613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2614 IEM_MC_ASSIGN(u64Src, u64Imm);
2615 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2616 IEM_MC_FETCH_EFLAGS(EFlags);
2617 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2619 else
2620 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
2621
2622 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
2623 IEM_MC_COMMIT_EFLAGS(EFlags);
2624 IEM_MC_ADVANCE_RIP();
2625 IEM_MC_END();
2626 }
2627 break;
2628 }
2629 }
2630 return VINF_SUCCESS;
2631}
2632
2633
2634/** Opcode 0x82. */
2635FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
2636{
2637 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
2638 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
2639}
2640
2641
2642/** Opcode 0x83. */
2643FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
2644{
2645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2646 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2647 {
2648 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
2649 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
2650 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
2651 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
2652 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
2653 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
2654 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
2655 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
2656 }
2657 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
2658 to the 386 even if absent in the intel reference manuals and some
2659 3rd party opcode listings. */
2660 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2661
2662 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2663 {
2664 /*
2665 * Register target
2666 */
2667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2668 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2669 switch (pVCpu->iem.s.enmEffOpSize)
2670 {
2671 case IEMMODE_16BIT:
2672 {
2673 IEM_MC_BEGIN(3, 0);
2674 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2675 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
2676 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2677
2678 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2679 IEM_MC_REF_EFLAGS(pEFlags);
2680 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2681
2682 IEM_MC_ADVANCE_RIP();
2683 IEM_MC_END();
2684 break;
2685 }
2686
2687 case IEMMODE_32BIT:
2688 {
2689 IEM_MC_BEGIN(3, 0);
2690 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2691 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
2692 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2693
2694 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2695 IEM_MC_REF_EFLAGS(pEFlags);
2696 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2697 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
2698
2699 IEM_MC_ADVANCE_RIP();
2700 IEM_MC_END();
2701 break;
2702 }
2703
2704 case IEMMODE_64BIT:
2705 {
2706 IEM_MC_BEGIN(3, 0);
2707 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2708 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
2709 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2710
2711 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2712 IEM_MC_REF_EFLAGS(pEFlags);
2713 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2714
2715 IEM_MC_ADVANCE_RIP();
2716 IEM_MC_END();
2717 break;
2718 }
2719 }
2720 }
2721 else
2722 {
2723 /*
2724 * Memory target.
2725 */
2726 uint32_t fAccess;
2727 if (pImpl->pfnLockedU16)
2728 fAccess = IEM_ACCESS_DATA_RW;
2729 else /* CMP */
2730 fAccess = IEM_ACCESS_DATA_R;
2731
2732 switch (pVCpu->iem.s.enmEffOpSize)
2733 {
2734 case IEMMODE_16BIT:
2735 {
2736 IEM_MC_BEGIN(3, 2);
2737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2738 IEM_MC_ARG(uint16_t, u16Src, 1);
2739 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2741
2742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2744 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
2745 if (pImpl->pfnLockedU16)
2746 IEMOP_HLP_DONE_DECODING();
2747 else
2748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2749 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2750 IEM_MC_FETCH_EFLAGS(EFlags);
2751 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2753 else
2754 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2755
2756 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
2757 IEM_MC_COMMIT_EFLAGS(EFlags);
2758 IEM_MC_ADVANCE_RIP();
2759 IEM_MC_END();
2760 break;
2761 }
2762
2763 case IEMMODE_32BIT:
2764 {
2765 IEM_MC_BEGIN(3, 2);
2766 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2767 IEM_MC_ARG(uint32_t, u32Src, 1);
2768 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2770
2771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2772 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2773 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
2774 if (pImpl->pfnLockedU32)
2775 IEMOP_HLP_DONE_DECODING();
2776 else
2777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2778 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2779 IEM_MC_FETCH_EFLAGS(EFlags);
2780 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2781 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2782 else
2783 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
2784
2785 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
2786 IEM_MC_COMMIT_EFLAGS(EFlags);
2787 IEM_MC_ADVANCE_RIP();
2788 IEM_MC_END();
2789 break;
2790 }
2791
2792 case IEMMODE_64BIT:
2793 {
2794 IEM_MC_BEGIN(3, 2);
2795 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2796 IEM_MC_ARG(uint64_t, u64Src, 1);
2797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2799
2800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2801 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2802 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
2803 if (pImpl->pfnLockedU64)
2804 IEMOP_HLP_DONE_DECODING();
2805 else
2806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2807 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2808 IEM_MC_FETCH_EFLAGS(EFlags);
2809 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2810 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2811 else
2812 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
2813
2814 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
2815 IEM_MC_COMMIT_EFLAGS(EFlags);
2816 IEM_MC_ADVANCE_RIP();
2817 IEM_MC_END();
2818 break;
2819 }
2820 }
2821 }
2822 return VINF_SUCCESS;
2823}
2824
2825
2826/** Opcode 0x84. */
2827FNIEMOP_DEF(iemOp_test_Eb_Gb)
2828{
2829 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
2830 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
2831 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
2832}
2833
2834
2835/** Opcode 0x85. */
2836FNIEMOP_DEF(iemOp_test_Ev_Gv)
2837{
2838 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
2839 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
2840 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
2841}
2842
2843
2844/** Opcode 0x86. */
2845FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
2846{
2847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2848 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
2849
2850 /*
2851 * If rm is denoting a register, no more instruction bytes.
2852 */
2853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2854 {
2855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2856
2857 IEM_MC_BEGIN(0, 2);
2858 IEM_MC_LOCAL(uint8_t, uTmp1);
2859 IEM_MC_LOCAL(uint8_t, uTmp2);
2860
2861 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2862 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2863 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
2864 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
2865
2866 IEM_MC_ADVANCE_RIP();
2867 IEM_MC_END();
2868 }
2869 else
2870 {
2871 /*
2872 * We're accessing memory.
2873 */
2874/** @todo the register must be committed separately! */
2875 IEM_MC_BEGIN(2, 2);
2876 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
2877 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
2878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2879
2880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2881 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2882 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2883 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
2884 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
2885
2886 IEM_MC_ADVANCE_RIP();
2887 IEM_MC_END();
2888 }
2889 return VINF_SUCCESS;
2890}
2891
2892
2893/** Opcode 0x87. */
2894FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
2895{
2896 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
2897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2898
2899 /*
2900 * If rm is denoting a register, no more instruction bytes.
2901 */
2902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2903 {
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2905
2906 switch (pVCpu->iem.s.enmEffOpSize)
2907 {
2908 case IEMMODE_16BIT:
2909 IEM_MC_BEGIN(0, 2);
2910 IEM_MC_LOCAL(uint16_t, uTmp1);
2911 IEM_MC_LOCAL(uint16_t, uTmp2);
2912
2913 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2914 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2915 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
2916 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
2917
2918 IEM_MC_ADVANCE_RIP();
2919 IEM_MC_END();
2920 return VINF_SUCCESS;
2921
2922 case IEMMODE_32BIT:
2923 IEM_MC_BEGIN(0, 2);
2924 IEM_MC_LOCAL(uint32_t, uTmp1);
2925 IEM_MC_LOCAL(uint32_t, uTmp2);
2926
2927 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2928 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2929 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
2930 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
2931
2932 IEM_MC_ADVANCE_RIP();
2933 IEM_MC_END();
2934 return VINF_SUCCESS;
2935
2936 case IEMMODE_64BIT:
2937 IEM_MC_BEGIN(0, 2);
2938 IEM_MC_LOCAL(uint64_t, uTmp1);
2939 IEM_MC_LOCAL(uint64_t, uTmp2);
2940
2941 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2942 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2943 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
2944 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
2945
2946 IEM_MC_ADVANCE_RIP();
2947 IEM_MC_END();
2948 return VINF_SUCCESS;
2949
2950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2951 }
2952 }
2953 else
2954 {
2955 /*
2956 * We're accessing memory.
2957 */
2958 switch (pVCpu->iem.s.enmEffOpSize)
2959 {
2960/** @todo the register must be committed separately! */
2961 case IEMMODE_16BIT:
2962 IEM_MC_BEGIN(2, 2);
2963 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
2964 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
2965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2966
2967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2968 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2969 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2970 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
2971 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
2972
2973 IEM_MC_ADVANCE_RIP();
2974 IEM_MC_END();
2975 return VINF_SUCCESS;
2976
2977 case IEMMODE_32BIT:
2978 IEM_MC_BEGIN(2, 2);
2979 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
2980 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
2981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2982
2983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2984 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2985 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2986 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
2987 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
2988
2989 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
2990 IEM_MC_ADVANCE_RIP();
2991 IEM_MC_END();
2992 return VINF_SUCCESS;
2993
2994 case IEMMODE_64BIT:
2995 IEM_MC_BEGIN(2, 2);
2996 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
2997 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
2998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2999
3000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3001 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3002 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3003 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3004 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3005
3006 IEM_MC_ADVANCE_RIP();
3007 IEM_MC_END();
3008 return VINF_SUCCESS;
3009
3010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3011 }
3012 }
3013}
3014
3015
3016/** Opcode 0x88. */
3017FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3018{
3019 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3020
3021 uint8_t bRm;
3022 IEM_OPCODE_GET_NEXT_U8(&bRm);
3023
3024 /*
3025 * If rm is denoting a register, no more instruction bytes.
3026 */
3027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3028 {
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3030 IEM_MC_BEGIN(0, 1);
3031 IEM_MC_LOCAL(uint8_t, u8Value);
3032 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3033 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3034 IEM_MC_ADVANCE_RIP();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /*
3040 * We're writing a register to memory.
3041 */
3042 IEM_MC_BEGIN(0, 2);
3043 IEM_MC_LOCAL(uint8_t, u8Value);
3044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3047 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3048 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 return VINF_SUCCESS;
3053
3054}
3055
3056
3057/** Opcode 0x89. */
3058FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3059{
3060 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3061
3062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3063
3064 /*
3065 * If rm is denoting a register, no more instruction bytes.
3066 */
3067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3068 {
3069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3070 switch (pVCpu->iem.s.enmEffOpSize)
3071 {
3072 case IEMMODE_16BIT:
3073 IEM_MC_BEGIN(0, 1);
3074 IEM_MC_LOCAL(uint16_t, u16Value);
3075 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3076 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3077 IEM_MC_ADVANCE_RIP();
3078 IEM_MC_END();
3079 break;
3080
3081 case IEMMODE_32BIT:
3082 IEM_MC_BEGIN(0, 1);
3083 IEM_MC_LOCAL(uint32_t, u32Value);
3084 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3085 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3086 IEM_MC_ADVANCE_RIP();
3087 IEM_MC_END();
3088 break;
3089
3090 case IEMMODE_64BIT:
3091 IEM_MC_BEGIN(0, 1);
3092 IEM_MC_LOCAL(uint64_t, u64Value);
3093 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3094 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3095 IEM_MC_ADVANCE_RIP();
3096 IEM_MC_END();
3097 break;
3098 }
3099 }
3100 else
3101 {
3102 /*
3103 * We're writing a register to memory.
3104 */
3105 switch (pVCpu->iem.s.enmEffOpSize)
3106 {
3107 case IEMMODE_16BIT:
3108 IEM_MC_BEGIN(0, 2);
3109 IEM_MC_LOCAL(uint16_t, u16Value);
3110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3114 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3115 IEM_MC_ADVANCE_RIP();
3116 IEM_MC_END();
3117 break;
3118
3119 case IEMMODE_32BIT:
3120 IEM_MC_BEGIN(0, 2);
3121 IEM_MC_LOCAL(uint32_t, u32Value);
3122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3126 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3127 IEM_MC_ADVANCE_RIP();
3128 IEM_MC_END();
3129 break;
3130
3131 case IEMMODE_64BIT:
3132 IEM_MC_BEGIN(0, 2);
3133 IEM_MC_LOCAL(uint64_t, u64Value);
3134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3138 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3139 IEM_MC_ADVANCE_RIP();
3140 IEM_MC_END();
3141 break;
3142 }
3143 }
3144 return VINF_SUCCESS;
3145}
3146
3147
/** Opcode 0x8a - mov Gb,Eb: load an 8-bit register from an r/m8 operand. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: REX.B extends the source (r/m), REX.R the destination (reg). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* consumes SIB/displacement bytes before the decoding-done check */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3185
3186
/** Opcode 0x8b - mov Gv,Ev: load a 16/32/64-bit register from an r/m operand
 *  of the current effective operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3276
3277
3278/** Opcode 0x63. */
3279FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3280{
3281 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3282 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3283 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3284 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3285 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3286}
3287
3288
/** Opcode 0x8c - mov Ev,Sw: store a segment register to a general register or
 *  a word-sized memory location. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended per the note above */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended per the note above */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3362
3363
3364
3365
/** Opcode 0x8d - lea Gv,M: store the effective address of the memory operand
 *  in a general register, truncated to the effective operand size. Register
 *  form (mod=3) is invalid. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3412
3413
/** Opcode 0x8e - mov Sw,Ev: load a segment register from a 16-bit register or
 *  memory operand.  Loading CS this way is invalid.  Defers the actual load to
 *  iemCImpl_load_SReg (descriptor checks, mode switching, etc.). */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3468
3469
/** Opcode 0x8f /0 - pop Ev: pop the top of stack into a register or memory
 *  operand.  The memory form is special-cased because Intel documents RSP as
 *  being incremented before the effective address is computed. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the operand size, i.e. how much rSP is advanced
       by before the address calculation. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary rSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new rSP and advance RIP only when everything succeeded. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3564
3565
/** Opcode 0x8f - group 1A (pop Ev when modrm.reg == 0) or the AMD XOP prefix
 *  otherwise. */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with legacy size/rep/lock or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
3626
3627
3628/**
3629 * Common 'xchg reg,rAX' helper.
3630 */
3631FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
3632{
3633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3634
3635 iReg |= pVCpu->iem.s.uRexB;
3636 switch (pVCpu->iem.s.enmEffOpSize)
3637 {
3638 case IEMMODE_16BIT:
3639 IEM_MC_BEGIN(0, 2);
3640 IEM_MC_LOCAL(uint16_t, u16Tmp1);
3641 IEM_MC_LOCAL(uint16_t, u16Tmp2);
3642 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
3643 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
3644 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
3645 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
3646 IEM_MC_ADVANCE_RIP();
3647 IEM_MC_END();
3648 return VINF_SUCCESS;
3649
3650 case IEMMODE_32BIT:
3651 IEM_MC_BEGIN(0, 2);
3652 IEM_MC_LOCAL(uint32_t, u32Tmp1);
3653 IEM_MC_LOCAL(uint32_t, u32Tmp2);
3654 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
3655 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
3656 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
3657 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 return VINF_SUCCESS;
3661
3662 case IEMMODE_64BIT:
3663 IEM_MC_BEGIN(0, 2);
3664 IEM_MC_LOCAL(uint64_t, u64Tmp1);
3665 IEM_MC_LOCAL(uint64_t, u64Tmp2);
3666 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
3667 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
3668 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
3669 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
3670 IEM_MC_ADVANCE_RIP();
3671 IEM_MC_END();
3672 return VINF_SUCCESS;
3673
3674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3675 }
3676}
3677
3678
3679/** Opcode 0x90. */
3680FNIEMOP_DEF(iemOp_nop)
3681{
3682 /* R8/R8D and RAX/EAX can be exchanged. */
3683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
3684 {
3685 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
3686 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
3687 }
3688
3689 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3690 IEMOP_MNEMONIC(pause, "pause");
3691 else
3692 IEMOP_MNEMONIC(nop, "nop");
3693 IEM_MC_BEGIN(0, 0);
3694 IEM_MC_ADVANCE_RIP();
3695 IEM_MC_END();
3696 return VINF_SUCCESS;
3697}
3698
3699
/** Opcode 0x91 - xchg rCX,rAX (also r9 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
3706
3707
/** Opcode 0x92 - xchg rDX,rAX (also r10 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
3714
3715
/** Opcode 0x93 - xchg rBX,rAX (also r11 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
3722
3723
3724/** Opcode 0x94. */
3725FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
3726{
3727 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
3728 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
3729}
3730
3731
/** Opcode 0x95 - xchg rBP,rAX (also r13 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
3738
3739
/** Opcode 0x96 - xchg rSI,rAX (also r14 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
3746
3747
/** Opcode 0x97 - xchg rDI,rAX (also r15 with REX.B via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
3754
3755
/** Opcode 0x98 - cbw/cwde/cdqe: sign-extend AL to AX, AX to EAX, or EAX to
 *  RAX, depending on the effective operand size.  Implemented by testing the
 *  sign bit and then OR'ing in / AND'ing away the upper half of rAX. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {     /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3801
3802
/** Opcode 0x99 - cwd/cdq/cqo: sign-extend rAX into rDX, i.e. fill rDX with
 *  all-ones or all-zeros depending on the sign bit of rAX at the effective
 *  operand size. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {    /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3848
3849
/** Opcode 0x9a - call Ap: direct far call with an immediate seg:offset
 *  pointer.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);  /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);  /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
3866
3867
/** Opcode 0x9b (aka fwait) - wait for pending FPU exceptions; may raise
 *  \#NM or an FPU exception, otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3881
3882
/** Opcode 0x9c - pushf Fv: push the flags register; deferred to the C
 *  implementation (IOPL/VM86 checks live there). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
3890
3891
/** Opcode 0x9d - popf Fv: pop the flags register; deferred to the C
 *  implementation (IOPL/VM86 checks live there). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
3899
3900
/** Opcode 0x9e - sahf: store AH into the low byte of EFLAGS (SF, ZF, AF, PF,
 *  CF).  Requires the LAHF/SAHF CPUID feature in 64-bit mode. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only the arithmetic status flags come from AH; bit 1 is always set. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3923
3924
/** Opcode 0x9f - lahf: load AH from the low byte of EFLAGS.  Requires the
 *  LAHF/SAHF CPUID feature in 64-bit mode. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3941
3942
3943/**
3944 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
3945 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
3946 * prefixes. Will return on failures.
3947 * @param a_GCPtrMemOff The variable to store the offset in.
3948 */
3949#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
3950 do \
3951 { \
3952 switch (pVCpu->iem.s.enmEffAddrMode) \
3953 { \
3954 case IEMMODE_16BIT: \
3955 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
3956 break; \
3957 case IEMMODE_32BIT: \
3958 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
3959 break; \
3960 case IEMMODE_64BIT: \
3961 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
3962 break; \
3963 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3964 } \
3965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3966 } while (0)
3967
3968/** Opcode 0xa0. */
3969FNIEMOP_DEF(iemOp_mov_Al_Ob)
3970{
3971 /*
3972 * Get the offset and fend of lock prefixes.
3973 */
3974 RTGCPTR GCPtrMemOff;
3975 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3976
3977 /*
3978 * Fetch AL.
3979 */
3980 IEM_MC_BEGIN(0,1);
3981 IEM_MC_LOCAL(uint8_t, u8Tmp);
3982 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
3983 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
3984 IEM_MC_ADVANCE_RIP();
3985 IEM_MC_END();
3986 return VINF_SUCCESS;
3987}
3988
3989
/** Opcode 0xa1 - mov rAX,Ov: load rAX from a moffs address at the effective
 *  operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR  GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4035
4036
4037/** Opcode 0xa2. */
4038FNIEMOP_DEF(iemOp_mov_Ob_AL)
4039{
4040 /*
4041 * Get the offset and fend of lock prefixes.
4042 */
4043 RTGCPTR GCPtrMemOff;
4044 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4045
4046 /*
4047 * Store AL.
4048 */
4049 IEM_MC_BEGIN(0,1);
4050 IEM_MC_LOCAL(uint8_t, u8Tmp);
4051 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4052 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4053 IEM_MC_ADVANCE_RIP();
4054 IEM_MC_END();
4055 return VINF_SUCCESS;
4056}
4057
4058
4059/** Opcode 0xa3. */
4060FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4061{
4062 /*
4063 * Get the offset and fend of lock prefixes.
4064 */
4065 RTGCPTR GCPtrMemOff;
4066 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4067
4068 /*
4069 * Store rAX.
4070 */
4071 switch (pVCpu->iem.s.enmEffOpSize)
4072 {
4073 case IEMMODE_16BIT:
4074 IEM_MC_BEGIN(0,1);
4075 IEM_MC_LOCAL(uint16_t, u16Tmp);
4076 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4077 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4078 IEM_MC_ADVANCE_RIP();
4079 IEM_MC_END();
4080 return VINF_SUCCESS;
4081
4082 case IEMMODE_32BIT:
4083 IEM_MC_BEGIN(0,1);
4084 IEM_MC_LOCAL(uint32_t, u32Tmp);
4085 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4086 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4087 IEM_MC_ADVANCE_RIP();
4088 IEM_MC_END();
4089 return VINF_SUCCESS;
4090
4091 case IEMMODE_64BIT:
4092 IEM_MC_BEGIN(0,1);
4093 IEM_MC_LOCAL(uint64_t, u64Tmp);
4094 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4096 IEM_MC_ADVANCE_RIP();
4097 IEM_MC_END();
4098 return VINF_SUCCESS;
4099
4100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4101 }
4102}
4103
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *  Emits one non-rep MOVS step: load ValBits from [seg:xSI], store to
 *  [ES:xDI], then advance or retreat both index registers by ValBits/8
 *  according to EFLAGS.DF.  AddrBits selects the address-size of xSI/xDI. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4122
/** Opcode 0xa4 - movsb Xb,Yb: move a byte from [seg:xSI] to [ES:xDI];
 *  the rep-prefixed forms are handled by C implementations. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4156
4157
4158/** Opcode 0xa5. */
4159FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4160{
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162
4163 /*
4164 * Use the C implementation if a repeat prefix is encountered.
4165 */
4166 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4167 {
4168 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4169 switch (pVCpu->iem.s.enmEffOpSize)
4170 {
4171 case IEMMODE_16BIT:
4172 switch (pVCpu->iem.s.enmEffAddrMode)
4173 {
4174 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4175 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4176 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4178 }
4179 break;
4180 case IEMMODE_32BIT:
4181 switch (pVCpu->iem.s.enmEffAddrMode)
4182 {
4183 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4184 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4185 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4187 }
4188 case IEMMODE_64BIT:
4189 switch (pVCpu->iem.s.enmEffAddrMode)
4190 {
4191 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4192 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4193 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4195 }
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4197 }
4198 }
4199 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4200
4201 /*
4202 * Annoying double switch here.
4203 * Using ugly macro for implementing the cases, sharing it with movsb.
4204 */
4205 switch (pVCpu->iem.s.enmEffOpSize)
4206 {
4207 case IEMMODE_16BIT:
4208 switch (pVCpu->iem.s.enmEffAddrMode)
4209 {
4210 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4211 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4212 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4214 }
4215 break;
4216
4217 case IEMMODE_32BIT:
4218 switch (pVCpu->iem.s.enmEffAddrMode)
4219 {
4220 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4221 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4222 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4224 }
4225 break;
4226
4227 case IEMMODE_64BIT:
4228 switch (pVCpu->iem.s.enmEffAddrMode)
4229 {
4230 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4231 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4232 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4234 }
4235 break;
4236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4237 }
4238 return VINF_SUCCESS;
4239}
4240
4241#undef IEM_MOVS_CASE
4242
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the MC block for one (non-repeated) CMPS iteration: fetches the
 * ValBits-wide value at [iEffSeg:xSI] and the one at [ES:xDI], compares them
 * via iemAImpl_cmp_u<ValBits> (updating EFLAGS), then steps both index
 * registers by ValBits/8 - down if EFLAGS.DF is set, up otherwise.
 * AddrBits selects the width of the index registers used for addressing.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/**
 * Opcode 0xa6 - CMPS Xb,Yb.
 *
 * Compares the byte at [DS:rSI] (segment overridable) with the byte at
 * [ES:rDI], updating EFLAGS and stepping both index registers by one
 * according to EFLAGS.DF.  Repeated forms are deferred to C implementations;
 * note that REPZ is checked first and therefore takes precedence should both
 * repeat prefix flags be set.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4315
4316
4317/** Opcode 0xa7. */
4318FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4319{
4320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4321
4322 /*
4323 * Use the C implementation if a repeat prefix is encountered.
4324 */
4325 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4326 {
4327 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4328 switch (pVCpu->iem.s.enmEffOpSize)
4329 {
4330 case IEMMODE_16BIT:
4331 switch (pVCpu->iem.s.enmEffAddrMode)
4332 {
4333 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4334 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4335 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4337 }
4338 break;
4339 case IEMMODE_32BIT:
4340 switch (pVCpu->iem.s.enmEffAddrMode)
4341 {
4342 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4343 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4344 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4346 }
4347 case IEMMODE_64BIT:
4348 switch (pVCpu->iem.s.enmEffAddrMode)
4349 {
4350 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4351 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4352 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4354 }
4355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4356 }
4357 }
4358
4359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4360 {
4361 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4362 switch (pVCpu->iem.s.enmEffOpSize)
4363 {
4364 case IEMMODE_16BIT:
4365 switch (pVCpu->iem.s.enmEffAddrMode)
4366 {
4367 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4368 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4369 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4371 }
4372 break;
4373 case IEMMODE_32BIT:
4374 switch (pVCpu->iem.s.enmEffAddrMode)
4375 {
4376 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4377 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4378 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4380 }
4381 case IEMMODE_64BIT:
4382 switch (pVCpu->iem.s.enmEffAddrMode)
4383 {
4384 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4385 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4386 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4388 }
4389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4390 }
4391 }
4392
4393 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4394
4395 /*
4396 * Annoying double switch here.
4397 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4398 */
4399 switch (pVCpu->iem.s.enmEffOpSize)
4400 {
4401 case IEMMODE_16BIT:
4402 switch (pVCpu->iem.s.enmEffAddrMode)
4403 {
4404 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4405 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4406 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4408 }
4409 break;
4410
4411 case IEMMODE_32BIT:
4412 switch (pVCpu->iem.s.enmEffAddrMode)
4413 {
4414 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4415 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4416 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4418 }
4419 break;
4420
4421 case IEMMODE_64BIT:
4422 switch (pVCpu->iem.s.enmEffAddrMode)
4423 {
4424 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4425 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4426 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4428 }
4429 break;
4430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4431 }
4432 return VINF_SUCCESS;
4433
4434}
4435
4436#undef IEM_CMPS_CASE
4437
/** Opcode 0xa8 - test AL,Ib.
 * AF is treated as undefined for verification purposes (see the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS below). */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
4445
4446
/** Opcode 0xa9 - test rAX,Iz (operand-size dependent immediate).
 * AF is treated as undefined for verification purposes, same as 0xa8. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
4454
4455
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the MC block for one (non-repeated) STOS iteration: stores the low
 * ValBits bits of rAX at [ES:xDI], then steps xDI by ValBits/8 - down if
 * EFLAGS.DF is set, up otherwise.  AddrBits selects the index register
 * width.  EFLAGS is not modified apart from being tested.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4471
/**
 * Opcode 0xaa - STOS Yb,AL.
 *
 * Stores AL at [ES:rDI] and steps rDI by one according to EFLAGS.DF.
 * The REP form is deferred to a C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4505
4506
4507/** Opcode 0xab. */
4508FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
4509{
4510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4511
4512 /*
4513 * Use the C implementation if a repeat prefix is encountered.
4514 */
4515 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4516 {
4517 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
4518 switch (pVCpu->iem.s.enmEffOpSize)
4519 {
4520 case IEMMODE_16BIT:
4521 switch (pVCpu->iem.s.enmEffAddrMode)
4522 {
4523 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
4524 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
4525 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
4526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4527 }
4528 break;
4529 case IEMMODE_32BIT:
4530 switch (pVCpu->iem.s.enmEffAddrMode)
4531 {
4532 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
4533 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
4534 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
4535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4536 }
4537 case IEMMODE_64BIT:
4538 switch (pVCpu->iem.s.enmEffAddrMode)
4539 {
4540 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
4541 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
4542 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
4543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4544 }
4545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4546 }
4547 }
4548 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
4549
4550 /*
4551 * Annoying double switch here.
4552 * Using ugly macro for implementing the cases, sharing it with stosb.
4553 */
4554 switch (pVCpu->iem.s.enmEffOpSize)
4555 {
4556 case IEMMODE_16BIT:
4557 switch (pVCpu->iem.s.enmEffAddrMode)
4558 {
4559 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
4560 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
4561 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
4562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4563 }
4564 break;
4565
4566 case IEMMODE_32BIT:
4567 switch (pVCpu->iem.s.enmEffAddrMode)
4568 {
4569 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
4570 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
4571 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
4572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4573 }
4574 break;
4575
4576 case IEMMODE_64BIT:
4577 switch (pVCpu->iem.s.enmEffAddrMode)
4578 {
4579 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4580 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
4581 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
4582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4583 }
4584 break;
4585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4586 }
4587 return VINF_SUCCESS;
4588}
4589
4590#undef IEM_STOS_CASE
4591
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the MC block for one (non-repeated) LODS iteration: loads the
 * ValBits-wide value at [iEffSeg:xSI] into the low ValBits bits of rAX, then
 * steps xSI by ValBits/8 - down if EFLAGS.DF is set, up otherwise.
 * AddrBits selects the index register width.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4607
/**
 * Opcode 0xac - LODS AL,Xb.
 *
 * Loads the byte at [DS:rSI] (segment overridable) into AL and steps rSI by
 * one according to EFLAGS.DF.  The REP form is deferred to a C
 * implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4641
4642
4643/** Opcode 0xad. */
4644FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
4645{
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4647
4648 /*
4649 * Use the C implementation if a repeat prefix is encountered.
4650 */
4651 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4652 {
4653 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
4654 switch (pVCpu->iem.s.enmEffOpSize)
4655 {
4656 case IEMMODE_16BIT:
4657 switch (pVCpu->iem.s.enmEffAddrMode)
4658 {
4659 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
4660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
4661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
4662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4663 }
4664 break;
4665 case IEMMODE_32BIT:
4666 switch (pVCpu->iem.s.enmEffAddrMode)
4667 {
4668 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
4669 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
4670 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
4671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4672 }
4673 case IEMMODE_64BIT:
4674 switch (pVCpu->iem.s.enmEffAddrMode)
4675 {
4676 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
4677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
4678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4680 }
4681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4682 }
4683 }
4684 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
4685
4686 /*
4687 * Annoying double switch here.
4688 * Using ugly macro for implementing the cases, sharing it with lodsb.
4689 */
4690 switch (pVCpu->iem.s.enmEffOpSize)
4691 {
4692 case IEMMODE_16BIT:
4693 switch (pVCpu->iem.s.enmEffAddrMode)
4694 {
4695 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
4696 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
4697 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
4698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4699 }
4700 break;
4701
4702 case IEMMODE_32BIT:
4703 switch (pVCpu->iem.s.enmEffAddrMode)
4704 {
4705 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
4706 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
4707 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
4708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4709 }
4710 break;
4711
4712 case IEMMODE_64BIT:
4713 switch (pVCpu->iem.s.enmEffAddrMode)
4714 {
4715 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4716 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
4717 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
4718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4719 }
4720 break;
4721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4722 }
4723 return VINF_SUCCESS;
4724}
4725
4726#undef IEM_LODS_CASE
4727
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the MC block for one (non-repeated) SCAS iteration: compares the low
 * ValBits bits of rAX with the ValBits-wide value at [ES:xDI] via
 * iemAImpl_cmp_u<ValBits> (updating EFLAGS), then steps xDI by ValBits/8 -
 * down if EFLAGS.DF is set, up otherwise.  AddrBits selects the index
 * register width.  Note: SCAS always uses ES:xDI; no segment override.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4749
4750/** Opcode 0xae. */
4751FNIEMOP_DEF(iemOp_scasb_AL_Xb)
4752{
4753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4754
4755 /*
4756 * Use the C implementation if a repeat prefix is encountered.
4757 */
4758 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4759 {
4760 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
4761 switch (pVCpu->iem.s.enmEffAddrMode)
4762 {
4763 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
4764 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
4765 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
4766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4767 }
4768 }
4769 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4770 {
4771 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
4772 switch (pVCpu->iem.s.enmEffAddrMode)
4773 {
4774 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
4775 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
4776 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
4777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4778 }
4779 }
4780 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
4781
4782 /*
4783 * Sharing case implementation with stos[wdq] below.
4784 */
4785 switch (pVCpu->iem.s.enmEffAddrMode)
4786 {
4787 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
4788 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
4789 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
4790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4791 }
4792 return VINF_SUCCESS;
4793}
4794
4795
4796/** Opcode 0xaf. */
4797FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
4798{
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800
4801 /*
4802 * Use the C implementation if a repeat prefix is encountered.
4803 */
4804 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4805 {
4806 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
4807 switch (pVCpu->iem.s.enmEffOpSize)
4808 {
4809 case IEMMODE_16BIT:
4810 switch (pVCpu->iem.s.enmEffAddrMode)
4811 {
4812 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
4813 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
4814 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
4815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4816 }
4817 break;
4818 case IEMMODE_32BIT:
4819 switch (pVCpu->iem.s.enmEffAddrMode)
4820 {
4821 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
4822 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
4823 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
4824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4825 }
4826 case IEMMODE_64BIT:
4827 switch (pVCpu->iem.s.enmEffAddrMode)
4828 {
4829 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
4830 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
4831 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
4832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4833 }
4834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4835 }
4836 }
4837 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4838 {
4839 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
4840 switch (pVCpu->iem.s.enmEffOpSize)
4841 {
4842 case IEMMODE_16BIT:
4843 switch (pVCpu->iem.s.enmEffAddrMode)
4844 {
4845 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
4846 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
4847 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
4848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4849 }
4850 break;
4851 case IEMMODE_32BIT:
4852 switch (pVCpu->iem.s.enmEffAddrMode)
4853 {
4854 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
4855 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
4856 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
4857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4858 }
4859 case IEMMODE_64BIT:
4860 switch (pVCpu->iem.s.enmEffAddrMode)
4861 {
4862 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
4863 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
4864 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
4865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4866 }
4867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4868 }
4869 }
4870 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
4871
4872 /*
4873 * Annoying double switch here.
4874 * Using ugly macro for implementing the cases, sharing it with scasb.
4875 */
4876 switch (pVCpu->iem.s.enmEffOpSize)
4877 {
4878 case IEMMODE_16BIT:
4879 switch (pVCpu->iem.s.enmEffAddrMode)
4880 {
4881 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
4882 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
4883 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
4884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4885 }
4886 break;
4887
4888 case IEMMODE_32BIT:
4889 switch (pVCpu->iem.s.enmEffAddrMode)
4890 {
4891 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
4892 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
4893 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
4894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4895 }
4896 break;
4897
4898 case IEMMODE_64BIT:
4899 switch (pVCpu->iem.s.enmEffAddrMode)
4900 {
4901 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4902 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
4903 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
4904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4905 }
4906 break;
4907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4908 }
4909 return VINF_SUCCESS;
4910}
4911
4912#undef IEM_SCAS_CASE
4913
/**
 * Common worker for 'mov r8, imm8' (opcodes 0xb0..0xb7).
 *
 * @param   iReg    The 8-bit general register index, including any REX.B
 *                  extension already OR'ed in by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    /* Fetch the immediate byte before signalling that decoding is complete. */
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
4930
4931
/** Opcode 0xb0 - mov AL,Ib (B0+rb; REX.B extends the register index). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
4938
4939
/** Opcode 0xb1 - mov CL,Ib (B1+rb; REX.B extends the register index). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
4946
4947
/** Opcode 0xb2 - mov DL,Ib (B2+rb; REX.B extends the register index). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
4954
4955
/** Opcode 0xb3 - mov BL,Ib (B3+rb; REX.B extends the register index). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
4962
4963
/** Opcode 0xb4 - mov AH,Ib.
 * Encoded with register index 4 (xSP); presumably the 8-bit GREG accessors
 * map indices 4-7 to AH/CH/DH/BH when no REX prefix is present and to
 * SPL/BPL/SIL/DIL otherwise - confirm in IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
4970
4971
/** Opcode 0xb5 - mov CH,Ib.
 * Encoded with register index 5 (xBP); see the note on iemOp_mov_AH_Ib about
 * high-byte register mapping. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
4978
4979
/** Opcode 0xb6 - mov DH,Ib.
 * Encoded with register index 6 (xSI); see the note on iemOp_mov_AH_Ib about
 * high-byte register mapping. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
4986
4987
/** Opcode 0xb7 - mov BH,Ib.
 * Encoded with register index 7 (xDI); see the note on iemOp_mov_AH_Ib about
 * high-byte register mapping. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
4994
4995
4996/**
4997 * Common 'mov regX,immX' helper.
4998 */
4999FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5000{
5001 switch (pVCpu->iem.s.enmEffOpSize)
5002 {
5003 case IEMMODE_16BIT:
5004 {
5005 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5007
5008 IEM_MC_BEGIN(0, 1);
5009 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5010 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 break;
5014 }
5015
5016 case IEMMODE_32BIT:
5017 {
5018 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020
5021 IEM_MC_BEGIN(0, 1);
5022 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5023 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5024 IEM_MC_ADVANCE_RIP();
5025 IEM_MC_END();
5026 break;
5027 }
5028 case IEMMODE_64BIT:
5029 {
5030 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5032
5033 IEM_MC_BEGIN(0, 1);
5034 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5035 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5036 IEM_MC_ADVANCE_RIP();
5037 IEM_MC_END();
5038 break;
5039 }
5040 }
5041
5042 return VINF_SUCCESS;
5043}
5044
5045
/** Opcode 0xb8 - mov rAX,Iv (B8+rd; the immediate is a full 64 bits with the
 *  64-bit operand size). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5052
5053
/** Opcode 0xb9 - mov rCX,Iv (B9+rd). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5060
5061
/** Opcode 0xba - mov rDX,Iv (BA+rd). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5068
5069
/** Opcode 0xbb - mov rBX,Iv (BB+rd). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5076
5077
/** Opcode 0xbc - mov rSP,Iv (BC+rd). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5084
5085
/** Opcode 0xbd - mov rBP,Iv (BD+rd). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5092
5093
/** Opcode 0xbe - mov rSI,Iv (BE+rd). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5100
5101
/** Opcode 0xbf - mov rDI,Iv (BF+rd). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5108
5109
/**
 * Opcode 0xc0 - Group 2, Eb,Ib: rol/ror/rcl/rcr/shl/shr/sar on a byte operand
 * with an immediate shift count.  /6 is an invalid encoding.  Requires a
 * 186 or later.  OF and AF are treated as undefined for verification.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address is decoded before the immediate shift count,
           matching the instruction byte layout (ModR/M + disp, then imm8).
           The trailing 1 is presumably the count of immediate bytes that
           follow - TODO confirm against IEM_MC_CALC_RM_EFF_ADDR. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5169
5170
/** Opcode 0xc1 - Group 2 Ev,Ib (rol/ror/rcl/rcr/shl/shr/sar). */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /*
     * Word/dword/qword shift/rotate with an immediate count; same structure
     * as the Eb,Ib form (0xc0) but dispatched per effective operand size.
     * ModR/M reg field selects the operation; /6 raises #UD.  Min 80186.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations (count dependent). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: fetch the count, then operate on the register reference. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: read-modify-write via mapping; '1' = one immediate byte
           still follows the ModR/M operand. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5310
5311
/** Opcode 0xc2 - retn Iw (near return, popping Iw extra bytes). */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return; the Iw immediate is the number of parameter bytes to pop
       off the stack after the return address.  In 64-bit mode the operand
       size defaults to 64 bits.  The heavy lifting is deferred to CIMPL. */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5321
5322
5323/** Opcode 0xc3. */
5324FNIEMOP_DEF(iemOp_retn)
5325{
5326 IEMOP_MNEMONIC(retn, "retn");
5327 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
5330}
5331
5332
/** Opcode 0xc4 - les Gv,Mp / two-byte VEX prefix. */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* VEX2 path: bRm is actually the second VEX byte (R vvvv L pp). */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            /* VEX may not be combined with 0x66/F2/F3, LOCK or REX prefixes. */
            if (   (  pVCpu->iem.s.fPrefixes
                    & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
                == 0)
            {
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
                /* NOTE(review): the result of '~bRm >> (7 - 3)' is not masked
                   down to the single REX.R bit (bit 3); confirm uRexReg
                   tolerates the extra set bits. */
                pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
                pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;  /* vvvv, inverted */
                pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;     /* L: 128/256-bit */
                pVCpu->iem.s.idxPrefix  = bRm & 0x3;          /* pp: implied prefix */

                return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
            }

            Log(("VEX2: Invalid prefix mix!\n"));
        }
        else
            Log(("VEX2: AVX support disabled!\n"));

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Legacy LES: load ES:reg from the far pointer at the memory operand. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
5374
5375
/** Opcode 0xc5 - lds Gv,Mp / three-byte VEX prefix. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* Legacy LDS: load DS:reg from the far pointer at the memory operand. */
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    /* VEX3 path: bRm is byte 1 (R X B mmmmm), bVex2 is byte 2 (W vvvv L pp). */
    IEMOP_MNEMONIC(vex3_prefix, "vex3");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
    {
        /** @todo Test when exctly the VEX conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* VEX may not be combined with 0x66/F2/F3, LOCK or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (bVex2 & 0x80 /* VEX.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* NOTE(review): these three shifts of the inverted R/X/B bits are
               not masked down to their single target bit; confirm the uRex*
               fields tolerate the extra set bits. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;  /* vvvv, inverted */
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;     /* L: 128/256-bit */
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;          /* pp: implied prefix */

            /* mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);

                case 2: /* 0x0f 0x38 lead opcode bytes. */
                    /** @todo VEX: Just use new tables and decoders. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 3: /* 0x0f 0x3a lead opcode bytes. */
                    /** @todo VEX: Just use new tables and decoders. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("VEX3: Invalid prefix mix!\n"));
    }
    else
        Log(("VEX3: AVX support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5443
5444
/** Opcode 0xc6 - Group 11 Eb,Ib (mov only). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /* Store a byte immediate to register or memory; /1../7 raise #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access; '1' = one immediate byte follows the ModR/M operand. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5477
5478
/** Opcode 0xc7 - Group 11 Ev,Iz (mov only). */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /* Store a word/dword immediate (sign-extended to 64-bit in long mode)
       to register or memory; /1../7 raise #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access; the third IEM_MC_CALC_RM_EFF_ADDR argument is the
           size of the immediate that still follows the ModR/M operand
           (2 or 4 bytes). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5564
5565
5566
5567
/** Opcode 0xc8 - enter Iw,Ib. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* Create a stack frame: Iw = frame size in bytes, Ib = nesting level.
       Requires 80186+; 64-bit mode defaults the operand size to 64 bits.
       Deferred to iemCImpl_enter. */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
5579
5580
/** Opcode 0xc9 - leave. */
FNIEMOP_DEF(iemOp_leave)
{
    /* Tear down the stack frame set up by ENTER.  Requires 80186+;
       64-bit mode defaults the operand size to 64 bits.  Deferred to CIMPL. */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
5590
5591
/** Opcode 0xca - retf Iw (far return, popping Iw extra bytes). */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* Far return; Iw is the number of parameter bytes to pop after the
       CS:(E/R)IP pair.  Deferred to iemCImpl_retf. */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5601
5602
/** Opcode 0xcb - retf (far return). */
FNIEMOP_DEF(iemOp_retf)
{
    /* Far return with no immediate (pops zero extra bytes). */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
5611
5612
/** Opcode 0xcc - int3 (breakpoint). */
FNIEMOP_DEF(iemOp_int_3)
{
    /* Raise #BP; fIsBpInstr=true distinguishes INT3 from "int 3" (0xcd 0x03). */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
5619
5620
/** Opcode 0xcd - int Ib (software interrupt). */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /* Raise the software interrupt given by the immediate vector. */
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
5628
5629
/** Opcode 0xce - into. */
FNIEMOP_DEF(iemOp_into)
{
    /* Raise #OF if the overflow flag is set; invalid in 64-bit mode (#UD).
       Uses the CIMPL int path with a constant vector. */
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5643
5644
/** Opcode 0xcf - iret. */
FNIEMOP_DEF(iemOp_iret)
{
    /* Interrupt return; all the mode-dependent work is in iemCImpl_iret. */
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
5652
5653
/** Opcode 0xd0 - Group 2 Eb,1 (shift/rotate by one). */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /* Byte shift/rotate by a constant count of 1.  ModR/M reg selects the
       operation; /6 raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these operations (count dependent). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: read-modify-write via mapping; no trailing immediate ('0'). */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5709
5710
5711
/** Opcode 0xd1 - Group 2 Ev,1 (shift/rotate by one). */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /* Word/dword/qword shift/rotate by a constant count of 1; same shape as
       the Eb,1 form (0xd0) but dispatched per effective operand size.
       ModR/M reg selects the operation; /6 raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these operations (count dependent). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: read-modify-write via mapping; no trailing immediate ('0'). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5843
5844
/** Opcode 0xd2 - Group 2 Eb,CL (shift/rotate by CL). */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /* Byte shift/rotate with the count taken from CL at runtime.
       ModR/M reg selects the operation; /6 raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by these operations (count dependent). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: fetch CL as the shift count, shift the register in place. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: read-modify-write via mapping; no trailing immediate ('0'). */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5902
5903
/** Opcode 0xd3 - Group 2 Ev,CL (shift/rotate by CL). */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Word/dword/qword shift/rotate with the count taken from CL at runtime;
       same shape as the Eb,CL form (0xd2) but dispatched per effective
       operand size.  ModR/M reg selects the operation; /6 raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations (count dependent). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: read-modify-write via mapping; no trailing immediate ('0'). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6041
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    /* AAM - ASCII adjust AX after multiply.  The immediate is the base
       (0x0a for the documented form).  Invalid outside 16/32-bit mode,
       and a zero base raises \#DE before deferring to the C implementation. */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6053
6054
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    /* AAD - ASCII adjust AX before division.  The immediate is the base
       (0x0a for the documented form); unlike AAM a zero base is legal.
       Invalid in 64-bit mode; work is deferred to the C implementation. */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6064
6065
6066/** Opcode 0xd6. */
6067FNIEMOP_DEF(iemOp_salc)
6068{
6069 IEMOP_MNEMONIC(salc, "salc");
6070 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6071 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6073 IEMOP_HLP_NO_64BIT();
6074
6075 IEM_MC_BEGIN(0, 0);
6076 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6077 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6078 } IEM_MC_ELSE() {
6079 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6080 } IEM_MC_ENDIF();
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 return VINF_SUCCESS;
6084}
6085
6086
/** Opcode 0xd7. */
FNIEMOP_DEF(iemOp_xlat)
{
    /*
     * XLAT: AL = [iEffSeg:(e/r)BX + zero-extended AL] - a byte table lookup.
     * One case per effective address size; the MEM16/MEM32 fetch variants
     * presumably confine the sum to the address-size width (TODO confirm).
     */
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6133
6134
6135/**
6136 * Common worker for FPU instructions working on ST0 and STn, and storing the
6137 * result in ST0.
6138 *
6139 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6140 */
6141FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6142{
6143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6144
6145 IEM_MC_BEGIN(3, 1);
6146 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6147 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6148 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6149 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6150
6151 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6152 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6153 IEM_MC_PREPARE_FPU_USAGE();
6154 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6155 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
6156 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6157 IEM_MC_ELSE()
6158 IEM_MC_FPU_STACK_UNDERFLOW(0);
6159 IEM_MC_ENDIF();
6160 IEM_MC_ADVANCE_RIP();
6161
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164}
6165
6166
6167/**
6168 * Common worker for FPU instructions working on ST0 and STn, and only affecting
6169 * flags.
6170 *
6171 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6172 */
6173FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6174{
6175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6176
6177 IEM_MC_BEGIN(3, 1);
6178 IEM_MC_LOCAL(uint16_t, u16Fsw);
6179 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6180 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6181 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6182
6183 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6184 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6185 IEM_MC_PREPARE_FPU_USAGE();
6186 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6187 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6188 IEM_MC_UPDATE_FSW(u16Fsw);
6189 IEM_MC_ELSE()
6190 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6191 IEM_MC_ENDIF();
6192 IEM_MC_ADVANCE_RIP();
6193
6194 IEM_MC_END();
6195 return VINF_SUCCESS;
6196}
6197
6198
6199/**
6200 * Common worker for FPU instructions working on ST0 and STn, only affecting
6201 * flags, and popping when done.
6202 *
6203 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6204 */
6205FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6206{
6207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6208
6209 IEM_MC_BEGIN(3, 1);
6210 IEM_MC_LOCAL(uint16_t, u16Fsw);
6211 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6212 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6213 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6214
6215 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6216 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6217 IEM_MC_PREPARE_FPU_USAGE();
6218 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6219 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6220 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
6221 IEM_MC_ELSE()
6222 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
6223 IEM_MC_ENDIF();
6224 IEM_MC_ADVANCE_RIP();
6225
6226 IEM_MC_END();
6227 return VINF_SUCCESS;
6228}
6229
6230
/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    /* fadd st0,stN: st0 = st0 + stN. */
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6237
6238
/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    /* fmul st0,stN: st0 = st0 * stN. */
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6245
6246
/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    /* fcom st0,stN: compare st0 with stN, setting only FSW condition codes. */
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6253
6254
/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    /* fcomp st0,stN: same worker as fcom but the _pop variant pops st0 after
       the compare; reuses the fcom assembly implementation. */
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6261
6262
/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    /* fsub st0,stN: st0 = st0 - stN. */
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6269
6270
/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    /* fsubr st0,stN: reversed operands, st0 = stN - st0. */
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6277
6278
/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    /* fdiv st0,stN: st0 = st0 / stN. */
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6285
6286
/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    /* fdivr st0,stN: reversed operands, st0 = stN / st0. */
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6293
6294
6295/**
6296 * Common worker for FPU instructions working on ST0 and an m32r, and storing
6297 * the result in ST0.
6298 *
6299 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6300 */
6301FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
6302{
6303 IEM_MC_BEGIN(3, 3);
6304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6305 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6306 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
6307 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6308 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6309 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
6310
6311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6313
6314 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6315 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6316 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6317
6318 IEM_MC_PREPARE_FPU_USAGE();
6319 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
6320 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
6321 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6322 IEM_MC_ELSE()
6323 IEM_MC_FPU_STACK_UNDERFLOW(0);
6324 IEM_MC_ENDIF();
6325 IEM_MC_ADVANCE_RIP();
6326
6327 IEM_MC_END();
6328 return VINF_SUCCESS;
6329}
6330
6331
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    /* fadd st0,m32r: st0 = st0 + 32-bit real from memory. */
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6338
6339
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    /* fmul st0,m32r: st0 = st0 * 32-bit real from memory. */
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6346
6347
/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    /* fcom st0,m32r: compare st0 with a 32-bit real from memory; only the
       FSW condition codes are updated, no register is written.  Open-coded
       rather than using a worker since there is no no-store m32r helper. */
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _MEM_OP variants also record the data pointer (FPUDP) for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6380
6381
/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    /* fcomp st0,m32r: identical to fcom st0,m32r except the THEN_POP update
       variants pop st0 after the compare (also in the underflow path). */
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6414
6415
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    /* fsub st0,m32r: st0 = st0 - 32-bit real from memory. */
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
6422
6423
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    /* fsubr st0,m32r: reversed operands, st0 = m32r - st0. */
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
6430
6431
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    /* fdiv st0,m32r: st0 = st0 / 32-bit real from memory. */
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
6438
6439
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    /* fdivr st0,m32r: reversed operands, st0 = m32r / st0. */
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6446
6447
/** Opcode 0xd8. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* FPU escape 0xd8: dispatch on the ModR/M reg field.  Register forms
       (mod == 3) work on st0/stN; memory forms work on st0 and an m32 real. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FOP value (low 3 opcode bits + ModR/M) for FPU state saves. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6485
6486
/** Opcode 0xd9 /0 mem32real
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    /* fld m32r: convert a 32-bit real from memory to 80-bit and push it. */
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets register 7 relative to TOP; it must be empty or we
       have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6519
6520
/** Opcode 0xd9 !11/2 mem32real */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    /* fst m32r: store st0 to memory as a 32-bit real (no pop). */
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on FSW (unmasked exceptions must not store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty st0: with IM masked, store the indefinite QNaN; then report
           stack underflow either way. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6555
6556
/** Opcode 0xd9 !11/3 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    /* fstp m32r: same as fst m32r but pops st0 afterwards (THEN_POP updates). */
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty st0: with IM masked, store the indefinite QNaN, then report
           underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6591
6592
/** Opcode 0xd9 !11/4 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    /* fldenv m14/28byte: load the FPU environment; the 14 vs 28 byte image
       is selected by the effective operand size, handled in the C impl. */
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6610
6611
6612/** Opcode 0xd9 !11/5 */
6613FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
6614{
6615 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
6616 IEM_MC_BEGIN(1, 1);
6617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6618 IEM_MC_ARG(uint16_t, u16Fsw, 0);
6619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6621 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6622 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6623 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6624 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
6625 IEM_MC_END();
6626 return VINF_SUCCESS;
6627}
6628
6629
/** Opcode 0xd9 !11/6 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    /* fnstenv m14/28byte: store the FPU environment without checking for
       pending exceptions (the "no-wait" form); image size depends on the
       effective operand size and is handled by the C implementation. */
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6646}
6647
6648
/** Opcode 0xd9 !11/7 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    /* fnstcw m2byte: store the FPU control word to memory ("no-wait" form). */
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6666
6667
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    /* FNOP: does nothing except update the FPU opcode/instruction pointer. */
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6685
6686
/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    /* fld stN: push a copy of stN onto the FPU stack. */
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* If the source register is empty this is a push *underflow*, not an
       overflow - the copy source is the problem, not the destination. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6714
6715
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    /* fxch stN: exchange st0 and stN; C1 is set (see SET_FPU_RESULT below). */
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Swap by writing st0's old value into stN and storing stN's old value
       (as FpuRes, with C1 set) into st0.  Underflow is left to the C impl. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6746
6747
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    /* fstp stN: copy st0 to stN, then pop the stack. */
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is st0 itself: no data needs copying, just a pop with
           a zero FSW update (or underflow-and-pop if st0 is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Regular case: store st0's value in stN and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6794
6795
6796/**
6797 * Common worker for FPU instructions working on ST0 and replaces it with the
6798 * result, i.e. unary operators.
6799 *
6800 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6801 */
6802FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
6803{
6804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6805
6806 IEM_MC_BEGIN(2, 1);
6807 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6808 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6809 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
6810
6811 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6812 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6813 IEM_MC_PREPARE_FPU_USAGE();
6814 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
6815 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
6816 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6817 IEM_MC_ELSE()
6818 IEM_MC_FPU_STACK_UNDERFLOW(0);
6819 IEM_MC_ENDIF();
6820 IEM_MC_ADVANCE_RIP();
6821
6822 IEM_MC_END();
6823 return VINF_SUCCESS;
6824}
6825
6826
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    /* fchs st0: negate the sign of st0. */
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
6833
6834
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    /* fabs st0: st0 = |st0|. */
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
6841
6842
6843/**
6844 * Common worker for FPU instructions working on ST0 and only returns FSW.
6845 *
6846 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6847 */
6848FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
6849{
6850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6851
6852 IEM_MC_BEGIN(2, 1);
6853 IEM_MC_LOCAL(uint16_t, u16Fsw);
6854 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6855 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
6856
6857 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6858 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6859 IEM_MC_PREPARE_FPU_USAGE();
6860 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
6861 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
6862 IEM_MC_UPDATE_FSW(u16Fsw);
6863 IEM_MC_ELSE()
6864 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6865 IEM_MC_ENDIF();
6866 IEM_MC_ADVANCE_RIP();
6867
6868 IEM_MC_END();
6869 return VINF_SUCCESS;
6870}
6871
6872
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    /* ftst: compare st0 against +0.0, setting FSW condition codes only. */
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
6879
6880
/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    /* fxam: classify the value in st0 via the FSW condition codes. */
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
6887
6888
6889/**
6890 * Common worker for FPU instructions pushing a constant onto the FPU stack.
6891 *
6892 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6893 */
6894FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
6895{
6896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6897
6898 IEM_MC_BEGIN(1, 1);
6899 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6900 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6901
6902 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6903 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6904 IEM_MC_PREPARE_FPU_USAGE();
6905 IEM_MC_IF_FPUREG_IS_EMPTY(7)
6906 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
6907 IEM_MC_PUSH_FPU_RESULT(FpuRes);
6908 IEM_MC_ELSE()
6909 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
6910 IEM_MC_ENDIF();
6911 IEM_MC_ADVANCE_RIP();
6912
6913 IEM_MC_END();
6914 return VINF_SUCCESS;
6915}
6916
6917
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    /* fld1: push the constant +1.0. */
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
6924
6925
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    /* fldl2t: push the constant log2(10). */
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
6932
6933
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    /* fldl2e: push the constant log2(e). */
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
6940
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    /* fldpi: push the constant pi. */
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
6947
6948
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    /* fldlg2: push the constant log10(2). */
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
6955
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    /* fldln2: push the constant ln(2). */
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
6962
6963
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    /* fldz: push the constant +0.0. */
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
6970
6971
/** Opcode 0xd9 0xf0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    /* f2xm1: st0 = 2^st0 - 1. */
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
6978
6979
6980/**
6981 * Common worker for FPU instructions working on STn and ST0, storing the result
6982 * in STn, and popping the stack unless IE, DE or ZE was raised.
6983 *
6984 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6985 */
6986FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6987{
6988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6989
6990 IEM_MC_BEGIN(3, 1);
6991 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6992 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6993 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6994 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6995
6996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6998
6999 IEM_MC_PREPARE_FPU_USAGE();
7000 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7001 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7002 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7003 IEM_MC_ELSE()
7004 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7005 IEM_MC_ENDIF();
7006 IEM_MC_ADVANCE_RIP();
7007
7008 IEM_MC_END();
7009 return VINF_SUCCESS;
7010}
7011
7012
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    /* fyl2x: st1 = st1 * log2(st0), then pop; hence the stN_st0_pop worker
       with the destination register fixed to 1 (ST1). */
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7019
7020
7021/**
7022 * Common worker for FPU instructions working on ST0 and having two outputs, one
7023 * replacing ST0 and one pushed onto the stack.
7024 *
7025 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7026 */
7027FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7028{
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030
7031 IEM_MC_BEGIN(2, 1);
7032 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7033 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7034 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7035
7036 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7037 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7038 IEM_MC_PREPARE_FPU_USAGE();
7039 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7040 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7041 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7042 IEM_MC_ELSE()
7043 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7044 IEM_MC_ENDIF();
7045 IEM_MC_ADVANCE_RIP();
7046
7047 IEM_MC_END();
7048 return VINF_SUCCESS;
7049}
7050
7051
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent; replaces ST(0) and pushes a
 *  second result (1.0 per the Intel mnemonic definition). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7058
7059
/** Opcode 0xd9 0xf3 - FPATAN: partial arctangent; result stored in ST(1), pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7066
7067
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (replaces ST(0)) and
 *  significand (pushed), per the two-output worker. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7074
7075
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7082
7083
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU top-of-stack pointer (TOP)
 *  without touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* Clears C0-C3 (see note above); no arithmetic flags are computed. */
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7106
7107
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU top-of-stack pointer (TOP)
 *  without touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Clears C0-C3 (see note above); no arithmetic flags are computed. */
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7130
7131
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST(0)/ST(1),
 *  result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7138
7139
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1); pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7146
7147
/** Opcode 0xd9 0xfa - FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7154
7155
/** Opcode 0xd9 0xfb - FSINCOS: sine replaces ST(0), cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7162
7163
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7170
7171
/** Opcode 0xd9 0xfd - FSCALE: scale ST(0) by 2^trunc(ST(1)), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7178
7179
/** Opcode 0xd9 0xfe - FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7186
7187
/** Opcode 0xd9 0xff - FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7194
7195
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form opcodes 0xe0-0xff; indexed by
 * (bRm - 0xe0).  Unassigned encodings point at iemOp_Invalid, so every slot is
 * callable and no NULL check is needed by the dispatcher. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7232
7233
/** Opcode 0xd9 - escape byte D9: decodes the ModR/M byte and dispatches to the
 *  register-form (mod == 3) or memory-form (mod != 3) handler. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4-7 with mod == 3 means bRm is in [0xe0, 0xff]; use the table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7276
7277
/** Opcode 0xda 11/0 - FCMOVB ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7304
7305
/** Opcode 0xda 11/1 - FCMOVE ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7332
7333
/** Opcode 0xda 11/2 - FCMOVBE ST(0),ST(i): copy ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7360
7361
/** Opcode 0xda 11/3 - FCMOVU ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.PF
 *  (the "unordered" flag after FCOMI-style compares) is set. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7388
7389
7390/**
7391 * Common worker for FPU instructions working on ST0 and STn, only affecting
7392 * flags, and popping twice when done.
7393 *
7394 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7395 */
7396FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7397{
7398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7399
7400 IEM_MC_BEGIN(3, 1);
7401 IEM_MC_LOCAL(uint16_t, u16Fsw);
7402 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7403 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7404 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7405
7406 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7407 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7408
7409 IEM_MC_PREPARE_FPU_USAGE();
7410 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
7411 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7412 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
7413 IEM_MC_ELSE()
7414 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
7415 IEM_MC_ENDIF();
7416 IEM_MC_ADVANCE_RIP();
7417
7418 IEM_MC_END();
7419 return VINF_SUCCESS;
7420}
7421
7422
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7429
7430
7431/**
7432 * Common worker for FPU instructions working on ST0 and an m32i, and storing
7433 * the result in ST0.
7434 *
7435 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7436 */
7437FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
7438{
7439 IEM_MC_BEGIN(3, 3);
7440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7441 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7442 IEM_MC_LOCAL(int32_t, i32Val2);
7443 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7444 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7445 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
7446
7447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7449
7450 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7451 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7452 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7453
7454 IEM_MC_PREPARE_FPU_USAGE();
7455 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7456 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
7457 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7458 IEM_MC_ELSE()
7459 IEM_MC_FPU_STACK_UNDERFLOW(0);
7460 IEM_MC_ENDIF();
7461 IEM_MC_ADVANCE_RIP();
7462
7463 IEM_MC_END();
7464 return VINF_SUCCESS;
7465}
7466
7467
/** Opcode 0xda !11/0 - FIADD m32int: ST(0) += (int32 operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
7474
7475
/** Opcode 0xda !11/1 - FIMUL m32int: ST(0) *= (int32 operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
7482
7483
/** Opcode 0xda !11/2 - FICOM m32int: compare ST(0) with an int32 memory
 *  operand; only FSW condition codes are updated, no register is written. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FSW update records the memory operand (FDP/FDS) as well. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7516
7517
/** Opcode 0xda !11/3 - FICOMP m32int: like FICOM m32int but pops ST(0)
 *  afterwards (same assembly worker, THEN_POP FSW update). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7550
7551
/** Opcode 0xda !11/4 - FISUB m32int: ST(0) -= (int32 operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
7558
7559
/** Opcode 0xda !11/5 - FISUBR m32int: ST(0) = (int32 operand) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
7566
7567
/** Opcode 0xda !11/6 - FIDIV m32int: ST(0) /= (int32 operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
7574
7575
/** Opcode 0xda !11/7 - FIDIVR m32int: ST(0) = (int32 operand) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
7582
7583
/** Opcode 0xda - escape byte DA: FCMOVcc / FUCOMPP register forms and the
 *  32-bit-integer arithmetic memory forms. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only the single 0xe9 encoding (FUCOMPP) is valid in reg 5. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer (m32i) arithmetic against ST(0). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7623
7624
/** Opcode 0xdb !11/0 - FILD m32int: convert an int32 memory operand to R80 and
 *  push it onto the FPU stack; overflow is signalled if the stack is full. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new top; it must be free to push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7656
7657
/** Opcode 0xdb !11/1 - FISTTP m32int (SSE3): store ST(0) to memory as int32
 *  with truncation (iemAImpl_fistt_*), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before the FPU state is touched. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7692
7693
/** Opcode 0xdb !11/2 - FIST m32int: store ST(0) to memory as int32 using the
 *  FCW rounding mode; ST(0) is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7728
7729
/** Opcode 0xdb !11/3 - FISTP m32int: like FIST m32int (rounds per FCW) but
 *  pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7764
7765
/** Opcode 0xdb !11/5 - FLD m80real: push an 80-bit real from memory onto the
 *  FPU stack; overflow is signalled if the stack is full. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new top; it must be free to push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7797
7798
/** Opcode 0xdb !11/7 - FSTP m80real: store ST(0) to memory as 80-bit real,
 *  then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7833
7834
/** Opcode 0xdb 11/0 - FCMOVNB ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7861
7862
/** Opcode 0xdb 11/1 - FCMOVNE ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7889
7890
/** Opcode 0xdb 11/2 - FCMOVNBE ST(0),ST(i): copy ST(i) to ST(0) if both CF and
 *  ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7917
7918
/** Opcode 0xdb 11/3 - FCMOVNU ST(0),ST(i): copy ST(i) to ST(0) if EFLAGS.PF is
 *  clear.  NOTE(review): the identifier's doubled 'n' (fcmovnnu) looks like a
 *  typo for fcmovnu; left as-is because the dispatch table references it. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, else stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7945
7946
/** Opcode 0xdb 0xe0 - FNENI: 8087 interrupt-enable; a no-op on later CPUs,
 *  emulated as such (only the DNA check and RIP advance). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7958
7959
/** Opcode 0xdb 0xe1 - FNDISI: 8087 interrupt-disable; a no-op on later CPUs,
 *  emulated as such (only the DNA check and RIP advance). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7971
7972
/** Opcode 0xdb 0xe2 - FNCLEX: clear the FPU exception flags in FSW without
 *  checking for pending exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7987
7988
/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU without checking for
 *  pending exceptions; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
7996
7997
/** Opcode 0xdb 0xe4 - FNSETPM: 80287 "set protected mode"; ignored (no-op)
 *  here, matching later-CPU behavior. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8009
8010
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL "return to real mode"; newer CPUs raise
 *  \#UD, which is the active branch here (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8026
8027
/** Opcode 0xdb 11/5 - FUCOMI ST(0),ST(i): unordered compare setting EFLAGS;
 *  no pop (fPop = false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8034
8035
/** Opcode 0xdb 11/6 - FCOMI ST(0),ST(i): ordered compare setting EFLAGS;
 *  no pop (fPop = false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8042
8043
/** Opcode 0xdb.
 * FPU escape group 3 dispatcher: fetches the ModR/M byte, records the 11-bit
 * FPU opcode for FOP reporting, then dispatches on mod (register vs memory
 * forms) and the reg field (and, for reg form /4, the full ModR/M byte). */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Last FPU opcode (FOP) = low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes individual no-operand instructions (0xe0..0xe7). */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8093
8094
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) (operand 1 = destination) and ST0 must be valid, else it is
       a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8126
8127
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8134
8135
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8142
8143
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8150
8151
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8158
8159
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8166
8167
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8174
8175
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; mod != 3, r/m encodes the memory operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8210
8211
/** Opcode 0xdc !11/0.
 * FADD ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8218
8219
/** Opcode 0xdc !11/1.
 * FMUL ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8226
8227
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64real - compare without popping; only FSW is updated, so the
 * result is a 16-bit status word rather than an IEMFPURESULT. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination stack register to mark. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8260
8261
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64real - same as FCOM m64r but pops ST0 afterwards (note the
 * _THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8294
8295
/** Opcode 0xdc !11/4.
 * FSUB ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8302
8303
/** Opcode 0xdc !11/5.
 * FSUBR ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8310
8311
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8318
8319
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8326
8327
/** Opcode 0xdc.
 * FPU escape group 4 dispatcher: register forms operate ST(i) <- op(ST(i),ST0)
 * (reversed sub/div order compared to 0xd8), memory forms use an m64real. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8364
8365
/** Opcode 0xdd !11/0.
 * FLD m64real - push the converted 64-bit real onto the FPU stack; pushing
 * requires ST7 (the register that will become the new top) to be empty.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8397
8398
/** Opcode 0xdd !11/1.  (Header previously claimed !11/0; the iemOp_EscF5
 *  dispatcher routes reg field 1 here.)
 * FISTTP m64int - store ST0 as 64-bit integer with truncation, then pop.
 * On stack underflow with IM masked, the integer indefinite (INT64_MIN) is
 * stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped (not stored via a temp) so the commit can be
       made conditional on the FSW the assembly worker returns. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8433
8434
/** Opcode 0xdd !11/2.  (Header previously claimed !11/0; the iemOp_EscF5
 *  dispatcher routes reg field 2 here.)
 * FST m64real - store ST0 as 64-bit real without popping.  On stack underflow
 * with IM masked a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8469
8470
8471
8472
/** Opcode 0xdd !11/3.  (Header previously claimed !11/0; the iemOp_EscF5
 *  dispatcher routes reg field 3 here.)
 * FSTP m64real - same as FST m64r but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8507
8508
/** Opcode 0xdd !11/4.  (Header previously claimed !11/0; the iemOp_EscF5
 *  dispatcher routes reg field 4 here.)
 * FRSTOR m94/108byte - restore the complete FPU state from memory; deferred
 * to the C implementation, state actualized for change since it is rewritten. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8526
8527
8528/** Opcode 0xdd !11/0. */
8529FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
8530{
8531 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
8532 IEM_MC_BEGIN(3, 0);
8533 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
8534 IEM_MC_ARG(uint8_t, iEffSeg, 1);
8535 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
8536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8538 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8539 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8540 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8541 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
8542 IEM_MC_END();
8543 return VINF_SUCCESS;
8544
8545}
8546
/** Opcode 0xdd !11/7.  (Header previously claimed !11/0; the iemOp_EscF5
 *  dispatcher routes reg field 7 here.)
 * FNSTSW m16 - store the FPU status word to memory without exception check. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
8571
8572
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register as empty in the tag word; top of stack is
 * left alone (contrast FFREEP, which also increments TOP). */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8594
8595
/** Opcode 0xdd 11/2.  (Header previously claimed 11/1; the iemOp_EscF5
 *  dispatcher routes reg field 2 here.)
 * FST ST(i) - copy ST0 into ST(i) without popping; FSW result is 0 since
 * a plain register copy raises nothing. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8620
8621
/** Opcode 0xdd 11/4.  (Header previously claimed 11/3; the iemOp_EscF5
 *  dispatcher routes reg field 4 here.)
 * FUCOM ST0,ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
8628
8629
/** Opcode 0xdd 11/5.  (Header previously claimed 11/4; the iemOp_EscF5
 *  dispatcher routes reg field 5 here.)
 * FUCOMP ST0,ST(i) - unordered compare, no store, pops ST0. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
8636
8637
/** Opcode 0xdd.
 * FPU escape group 5 dispatcher: register forms are FFREE/FST/FSTP/FUCOM(P),
 * memory forms are the m64real load/stores plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8674
8675
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
8682
8683
/** Opcode 0xde 11/1.  (Header previously claimed 11/0; the iemOp_EscF6
 *  dispatcher routes reg field 1 here.)
 * FMULP ST(i),ST0 - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
8690
8691
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
8698
8699
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
8706
8707
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
8714
8715
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
8722
8723
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
8730
8731
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; mod != 3, r/m encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8767
8768
/** Opcode 0xde !11/0.
 * FIADD ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
8775
8776
/** Opcode 0xde !11/1.
 * FIMUL ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
8783
8784
/** Opcode 0xde !11/2.
 * FICOM ST0,m16int - compare ST0 with a 16-bit signed integer; only FSW is
 * updated, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8817
8818
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16int - same as FICOM m16i but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8851
8852
/** Opcode 0xde !11/4.
 * FISUB ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
8859
8860
/** Opcode 0xde !11/5.
 * FISUBR ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
8867
8868
/** Opcode 0xde !11/6.
 * FIDIV ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
8875
8876
/** Opcode 0xde !11/7.
 * FIDIVR ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
8883
8884
/** Opcode 0xde.
 * FPU escape group 6 dispatcher: register forms are the popping arithmetic
 * variants (FADDP etc., plus FCOMPP at the 0xd9 encoding under /3), memory
 * forms operate on an m16int. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only the 0xd9 (ST1) encoding of /3 is valid. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8923
8924
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: mark ST(i)
 * empty in the tag word, then increment TOP (popping without a store). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8946
8947
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX without exception check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8964
8965
8966/** Opcode 0xdf 11/5. */
8967FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
8968{
8969 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
8970 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
8971}
8972
8973
/** Opcode 0xdf 11/6.
 * FCOMIP - ordered compare ST0 with ST(i) setting EFLAGS (any NaN raises
 * #IA), then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8980
8981
/** Opcode 0xdf !11/0.
 * FILD m16i - load a signed 16-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU stack.  If the would-be ST(7) slot
 * (i.e. the new ST0) is not empty, a stack push overflow is signalled
 * instead. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int16_t,                 i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* register that will become ST0 after the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9013
9014
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST0 to memory as a signed 16-bit integer using
 * truncation (regardless of the rounding control), then pop the FPU stack.
 * On stack underflow with the invalid-op exception masked (FCW.IM set), the
 * integer indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state so a
       #PF is raised before any FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9049
9050
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST0 to memory as a signed 16-bit integer using the
 * current rounding mode; the FPU stack is NOT popped (contrast with
 * iemOp_fistp_m16i).  On stack underflow with FCW.IM set, the integer
 * indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination for writing up front so #PF precedes FPU changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop */
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9085
9086
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST0 to memory as a signed 16-bit integer using the
 * current rounding mode, then pop the FPU stack.  On stack underflow with
 * FCW.IM set, the integer indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination for writing up front so #PF precedes FPU changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9121
9122
/** Opcode 0xdf !11/4.
 * FBLD - load packed BCD from memory (the '_m80d' suffix notwithstanding,
 * the operand is 80-bit packed BCD per the instruction set reference).
 * Not implemented yet; behavior is whatever FNIEMOP_STUB_1 provides. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9125
9126
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU stack.  If the would-be ST(7) slot
 * (the new ST0) is not empty, a stack push overflow is signalled instead. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* register that will become ST0 after the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9158
9159
/** Opcode 0xdf !11/6.
 * FBSTP - store ST0 to memory as packed BCD and pop (the '_m80d' suffix
 * notwithstanding, the operand is 80-bit packed BCD per the instruction
 * set reference).  Not implemented yet; behavior is whatever
 * FNIEMOP_STUB_1 provides. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9162
9163
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST0 to memory as a signed 64-bit integer using the
 * current rounding mode, then pop the FPU stack.  On stack underflow with
 * FCW.IM set, the integer indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination for writing up front so #PF precedes FPU changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9198
9199
/** Opcode 0xdf.
 * Escape opcode 7 dispatcher: reads the ModR/M byte and routes to the
 * register-form (mod == 3) or memory-form handler selected by the REG
 * field. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,   bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,   bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,   bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only the single encoding df/e0 is FNSTSW AX */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9237
9238
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb - decrement the counter register (CX/ECX/RCX depending
 * on the effective address size) and take the relative 8-bit jump if the
 * counter is non-zero AND ZF is clear; otherwise fall through. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9285
9286
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb - decrement the counter register (CX/ECX/RCX depending on
 * the effective address size) and take the relative 8-bit jump if the
 * counter is non-zero AND ZF is set; otherwise fall through. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9333
9334
/** Opcode 0xe2.
 * LOOP Jb - decrement the counter register (CX/ECX/RCX depending on the
 * effective address size) and take the relative 8-bit jump if the counter
 * is non-zero.
 *
 * A 'loop $' (the jump target is the LOOP instruction itself, detected by
 * comparing the displacement against the negated instruction length) would
 * spin until the counter reaches zero; it is short-circuited by clearing
 * the counter and advancing RIP in one go. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the whole countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the whole countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the whole countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9408
9409
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb - take the relative 8-bit jump if the counter
 * register (CX/ECX/RCX selected by the effective address size) is zero;
 * otherwise fall through.  The counter is only tested, never modified. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted branches: non-zero counter means no jump. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9453
9454
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the I/O port given by the immediate into
 * AL.  Defers to the common 'in' C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
9463
9464
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per the effective operand size) from
 * the I/O port given by the immediate into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9473
9474
/** Opcode 0xe6.
 * OUT Ib,AL - write the byte in AL to the I/O port given by the immediate.
 * Defers to the common 'out' C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
9483
9484
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX or EAX (per the effective operand size) to the I/O
 * port given by the immediate. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9493
9494
/** Opcode 0xe8.
 * CALL Jv - near relative call.  The immediate size follows the effective
 * operand size; in 64-bit mode the displacement is a sign-extended 32-bit
 * value (there is no 64-bit immediate form).  Defers to the per-size
 * relative-call C implementation. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit displacement to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9523
9524
/** Opcode 0xe9.
 * JMP Jv - near relative jump.  The immediate size follows the effective
 * operand size; the 64-bit case shares the 32-bit path since the
 * displacement is a sign-extended 32-bit value in both. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* same immediate form as 32-bit */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9554
9555
/** Opcode 0xea.
 * JMP Ap - direct far jump via an immediate selector:offset pair.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The offset is 16 or 32
 * bits depending on the effective operand size; the selector always
 * follows as 16 bits.  Defers to the common far-jump C implementation. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
9572
9573
/** Opcode 0xeb.
 * JMP Jb - short (8-bit relative) unconditional jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9587
9588
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL.  Defers to the
 * common eAX/DX 'in' C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
9596
9597
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per the effective operand size) from
 * the I/O port in DX into AX/EAX.
 * NOTE(review): the function name lacks the 'in_' part ('iemOp_eAX_DX'
 * rather than 'iemOp_in_eAX_DX', cf. the mnemonic below); renaming would
 * require updating the one-byte opcode dispatch table, so it is left as-is. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9605
9606
/** Opcode 0xee.
 * OUT DX,AL - write the byte in AL to the I/O port in DX.  Defers to the
 * common DX/eAX 'out' C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
9614
9615
/** Opcode 0xef.
 * OUT DX,eAX - write AX or EAX (per the effective operand size) to the I/O
 * port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9623
9624
/** Opcode 0xf0.
 * LOCK prefix - records the prefix in fPrefixes, then fetches the next
 * opcode byte and re-dispatches through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Continue decoding with the following byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9634
9635
/** Opcode 0xf1.
 * INT1/ICEBP - raises a debug exception (#DB) via the common software
 * interrupt C implementation; fIsBpInstr is false since this is not INT3. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
9644
9645
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - records the prefix (clearing any earlier REPZ),
 * updates the 4-entry opcode-table index, then fetches the next opcode
 * byte and re-dispatches through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9661
9662
/** Opcode 0xf3.
 * REPE/REPZ prefix - records the prefix (clearing any earlier REPNZ),
 * updates the 4-entry opcode-table index, then fetches the next opcode
 * byte and re-dispatches through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9678
9679
/** Opcode 0xf4.
 * HLT - halt the processor; defers to the C implementation.
 * NOTE(review): no IEMOP_MNEMONIC here, unlike the neighbouring handlers -
 * presumably intentional (stats/logging), but worth confirming. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
9686
9687
/** Opcode 0xf5.
 * CMC - complement (toggle) the carry flag; no other flags are touched. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9699
9700
9701/**
9702 * Common implementation of 'inc/dec/not/neg Eb'.
9703 *
9704 * @param bRm The RM byte.
9705 * @param pImpl The instruction implementation.
9706 */
9707FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9708{
9709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9710 {
9711 /* register access */
9712 IEM_MC_BEGIN(2, 0);
9713 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9714 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9715 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9716 IEM_MC_REF_EFLAGS(pEFlags);
9717 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9718 IEM_MC_ADVANCE_RIP();
9719 IEM_MC_END();
9720 }
9721 else
9722 {
9723 /* memory access. */
9724 IEM_MC_BEGIN(2, 2);
9725 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9726 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9728
9729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9730 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9731 IEM_MC_FETCH_EFLAGS(EFlags);
9732 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9733 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9734 else
9735 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
9736
9737 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9738 IEM_MC_COMMIT_EFLAGS(EFlags);
9739 IEM_MC_ADVANCE_RIP();
9740 IEM_MC_END();
9741 }
9742 return VINF_SUCCESS;
9743}
9744
9745
9746/**
9747 * Common implementation of 'inc/dec/not/neg Ev'.
9748 *
9749 * @param bRm The RM byte.
9750 * @param pImpl The instruction implementation.
9751 */
9752FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9753{
9754 /* Registers are handled by a common worker. */
9755 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9756 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9757
9758 /* Memory we do here. */
9759 switch (pVCpu->iem.s.enmEffOpSize)
9760 {
9761 case IEMMODE_16BIT:
9762 IEM_MC_BEGIN(2, 2);
9763 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9764 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9766
9767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9768 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9769 IEM_MC_FETCH_EFLAGS(EFlags);
9770 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9771 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9772 else
9773 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
9774
9775 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9776 IEM_MC_COMMIT_EFLAGS(EFlags);
9777 IEM_MC_ADVANCE_RIP();
9778 IEM_MC_END();
9779 return VINF_SUCCESS;
9780
9781 case IEMMODE_32BIT:
9782 IEM_MC_BEGIN(2, 2);
9783 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9784 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9786
9787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9788 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9789 IEM_MC_FETCH_EFLAGS(EFlags);
9790 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9791 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
9792 else
9793 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
9794
9795 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9796 IEM_MC_COMMIT_EFLAGS(EFlags);
9797 IEM_MC_ADVANCE_RIP();
9798 IEM_MC_END();
9799 return VINF_SUCCESS;
9800
9801 case IEMMODE_64BIT:
9802 IEM_MC_BEGIN(2, 2);
9803 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9806
9807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9808 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9809 IEM_MC_FETCH_EFLAGS(EFlags);
9810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9811 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9812 else
9813 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
9814
9815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9816 IEM_MC_COMMIT_EFLAGS(EFlags);
9817 IEM_MC_ADVANCE_RIP();
9818 IEM_MC_END();
9819 return VINF_SUCCESS;
9820
9821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9822 }
9823}
9824
9825
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND the 8-bit operand with an immediate, updating EFLAGS
 * only; the destination is never written (note the read-only memory
 * mapping below).  AF is left undefined. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,      1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* One immediate byte follows the ModR/M bytes (the '1' argument). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST does not write the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9872
9873
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND the 16/32/64-bit operand with an immediate, updating
 * EFLAGS only; the destination is never written (read-only memory mapping,
 * and no result write-back in the register forms).  In 64-bit mode the
 * immediate is a sign-extended 32-bit value.  AF is left undefined. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* Sign-extend the 32-bit immediate to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Two immediate bytes follow (the '2' argument). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read-only mapping: TEST does not write the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow (the '4' argument). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow, sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10013
10014
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized forms of mul, imul, div and idiv.  The
 * 8-bit operand (register or memory, per ModR/M) is combined with AX via the
 * assembly worker @a pfnU8; AX is both input and output.  A non-zero status
 * from the worker raises \#DE (divide error).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); /* AX is input and output. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            /* Non-zero worker status -> #DE. */
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); /* AX is input and output. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            /* Non-zero worker status -> #DE. */
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10066
10067
10068/** Opcode 0xf7 /4, /5, /6 and /7. */
10069FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10070{
10071 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10072
10073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10074 {
10075 /* register access */
10076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10077 switch (pVCpu->iem.s.enmEffOpSize)
10078 {
10079 case IEMMODE_16BIT:
10080 {
10081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10082 IEM_MC_BEGIN(4, 1);
10083 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10084 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10085 IEM_MC_ARG(uint16_t, u16Value, 2);
10086 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10087 IEM_MC_LOCAL(int32_t, rc);
10088
10089 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10090 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10091 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10092 IEM_MC_REF_EFLAGS(pEFlags);
10093 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10094 IEM_MC_IF_LOCAL_IS_Z(rc) {
10095 IEM_MC_ADVANCE_RIP();
10096 } IEM_MC_ELSE() {
10097 IEM_MC_RAISE_DIVIDE_ERROR();
10098 } IEM_MC_ENDIF();
10099
10100 IEM_MC_END();
10101 return VINF_SUCCESS;
10102 }
10103
10104 case IEMMODE_32BIT:
10105 {
10106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10107 IEM_MC_BEGIN(4, 1);
10108 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10109 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10110 IEM_MC_ARG(uint32_t, u32Value, 2);
10111 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10112 IEM_MC_LOCAL(int32_t, rc);
10113
10114 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10115 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10116 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10117 IEM_MC_REF_EFLAGS(pEFlags);
10118 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10119 IEM_MC_IF_LOCAL_IS_Z(rc) {
10120 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10121 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10122 IEM_MC_ADVANCE_RIP();
10123 } IEM_MC_ELSE() {
10124 IEM_MC_RAISE_DIVIDE_ERROR();
10125 } IEM_MC_ENDIF();
10126
10127 IEM_MC_END();
10128 return VINF_SUCCESS;
10129 }
10130
10131 case IEMMODE_64BIT:
10132 {
10133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10134 IEM_MC_BEGIN(4, 1);
10135 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10136 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10137 IEM_MC_ARG(uint64_t, u64Value, 2);
10138 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10139 IEM_MC_LOCAL(int32_t, rc);
10140
10141 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10142 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10143 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10144 IEM_MC_REF_EFLAGS(pEFlags);
10145 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10146 IEM_MC_IF_LOCAL_IS_Z(rc) {
10147 IEM_MC_ADVANCE_RIP();
10148 } IEM_MC_ELSE() {
10149 IEM_MC_RAISE_DIVIDE_ERROR();
10150 } IEM_MC_ENDIF();
10151
10152 IEM_MC_END();
10153 return VINF_SUCCESS;
10154 }
10155
10156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10157 }
10158 }
10159 else
10160 {
10161 /* memory access. */
10162 switch (pVCpu->iem.s.enmEffOpSize)
10163 {
10164 case IEMMODE_16BIT:
10165 {
10166 IEM_MC_BEGIN(4, 2);
10167 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10168 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10169 IEM_MC_ARG(uint16_t, u16Value, 2);
10170 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10172 IEM_MC_LOCAL(int32_t, rc);
10173
10174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10176 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10177 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10178 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10179 IEM_MC_REF_EFLAGS(pEFlags);
10180 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10181 IEM_MC_IF_LOCAL_IS_Z(rc) {
10182 IEM_MC_ADVANCE_RIP();
10183 } IEM_MC_ELSE() {
10184 IEM_MC_RAISE_DIVIDE_ERROR();
10185 } IEM_MC_ENDIF();
10186
10187 IEM_MC_END();
10188 return VINF_SUCCESS;
10189 }
10190
10191 case IEMMODE_32BIT:
10192 {
10193 IEM_MC_BEGIN(4, 2);
10194 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10195 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10196 IEM_MC_ARG(uint32_t, u32Value, 2);
10197 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10199 IEM_MC_LOCAL(int32_t, rc);
10200
10201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10203 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10204 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10205 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10206 IEM_MC_REF_EFLAGS(pEFlags);
10207 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10208 IEM_MC_IF_LOCAL_IS_Z(rc) {
10209 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10210 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10211 IEM_MC_ADVANCE_RIP();
10212 } IEM_MC_ELSE() {
10213 IEM_MC_RAISE_DIVIDE_ERROR();
10214 } IEM_MC_ENDIF();
10215
10216 IEM_MC_END();
10217 return VINF_SUCCESS;
10218 }
10219
10220 case IEMMODE_64BIT:
10221 {
10222 IEM_MC_BEGIN(4, 2);
10223 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10224 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10225 IEM_MC_ARG(uint64_t, u64Value, 2);
10226 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10228 IEM_MC_LOCAL(int32_t, rc);
10229
10230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10232 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10233 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10234 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10235 IEM_MC_REF_EFLAGS(pEFlags);
10236 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10237 IEM_MC_IF_LOCAL_IS_Z(rc) {
10238 IEM_MC_ADVANCE_RIP();
10239 } IEM_MC_ELSE() {
10240 IEM_MC_RAISE_DIVIDE_ERROR();
10241 } IEM_MC_ENDIF();
10242
10243 IEM_MC_END();
10244 return VINF_SUCCESS;
10245 }
10246
10247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10248 }
10249 }
10250}
10251
/** Opcode 0xf6.
 *
 * Group 3 with a byte operand: dispatches on the ModR/M reg field to
 * test (/0), not (/2), neg (/3), mul (/4), imul (/5), div (/6) and idiv (/7).
 * /1 raises \#UD (see todo below).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10288
10289
/** Opcode 0xf7.
 *
 * Group 3 with a 16/32/64-bit operand: dispatches on the ModR/M reg field to
 * test (/0), not (/2), neg (/3), mul (/4), imul (/5), div (/6) and idiv (/7).
 * /1 raises \#UD (see todo below).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10326
10327
/** Opcode 0xf8 - clc: clears EFLAGS.CF and advances RIP. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10339
10340
/** Opcode 0xf9 - stc: sets EFLAGS.CF and advances RIP. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10352
10353
/** Opcode 0xfa - cli: deferred to the C implementation (iemCImpl_cli),
 *  which handles the privilege/IOPL checks. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
10361
10362
/** Opcode 0xfb - sti: deferred to the C implementation (iemCImpl_sti),
 *  which handles the privilege/IOPL checks and the interrupt shadow. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10369
10370
/** Opcode 0xfc - cld: clears EFLAGS.DF and advances RIP. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10382
10383
/** Opcode 0xfd - std: sets EFLAGS.DF and advances RIP. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10395
10396
10397/** Opcode 0xfe. */
10398FNIEMOP_DEF(iemOp_Grp4)
10399{
10400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10401 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10402 {
10403 case 0:
10404 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
10405 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
10406 case 1:
10407 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
10408 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
10409 default:
10410 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
10411 return IEMOP_RAISE_INVALID_OPCODE();
10412 }
10413}
10414
10415
/**
 * Opcode 0xff /2 - calln Ev.
 *
 * Near indirect call: the target RIP is read from the register or memory
 * operand selected by ModR/M and handed to the size-specific
 * iemCImpl_call_16/32/64 worker, which pushes the return address and jumps.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10500
10501typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
10502
10503FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
10504{
10505 /* Registers? How?? */
10506 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
10507 { /* likely */ }
10508 else
10509 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
10510
10511 /* Far pointer loaded from memory. */
10512 switch (pVCpu->iem.s.enmEffOpSize)
10513 {
10514 case IEMMODE_16BIT:
10515 IEM_MC_BEGIN(3, 1);
10516 IEM_MC_ARG(uint16_t, u16Sel, 0);
10517 IEM_MC_ARG(uint16_t, offSeg, 1);
10518 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10522 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10523 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
10524 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10525 IEM_MC_END();
10526 return VINF_SUCCESS;
10527
10528 case IEMMODE_64BIT:
10529 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
10530 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
10531 * and call far qword [rsp] encodings. */
10532 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
10533 {
10534 IEM_MC_BEGIN(3, 1);
10535 IEM_MC_ARG(uint16_t, u16Sel, 0);
10536 IEM_MC_ARG(uint64_t, offSeg, 1);
10537 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10541 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10542 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
10543 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10544 IEM_MC_END();
10545 return VINF_SUCCESS;
10546 }
10547 /* AMD falls thru. */
10548 /* fall thru */
10549
10550 case IEMMODE_32BIT:
10551 IEM_MC_BEGIN(3, 1);
10552 IEM_MC_ARG(uint16_t, u16Sel, 0);
10553 IEM_MC_ARG(uint32_t, offSeg, 1);
10554 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
10555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10558 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10559 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
10560 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10561 IEM_MC_END();
10562 return VINF_SUCCESS;
10563
10564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10565 }
10566}
10567
10568
/**
 * Opcode 0xff /3 - callf Ep.
 *
 * Far indirect call thru a far pointer in memory; all the work happens in the
 * shared iemOpHlp_Grp5_far_Ep helper with iemCImpl_callf as the worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
10578
10579
/**
 * Opcode 0xff /4 - jmpn Ev.
 *
 * Near indirect jump: the new RIP is read from the register or memory operand
 * selected by ModR/M and installed via IEM_MC_SET_RIP_U16/U32/U64.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10664
10665
/**
 * Opcode 0xff /5 - jmpf Ep.
 *
 * Far indirect jump thru a far pointer in memory; all the work happens in the
 * shared iemOpHlp_Grp5_far_Ep helper with iemCImpl_FarJmp as the worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
10675
10676
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Pushes the 16/32/64-bit register or memory operand onto the stack.
 * Register operands go thru the common iemOpCommonPushGReg worker; memory
 * operands are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10732
10733
10734/** Opcode 0xff. */
10735FNIEMOP_DEF(iemOp_Grp5)
10736{
10737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10738 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10739 {
10740 case 0:
10741 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
10742 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
10743 case 1:
10744 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
10745 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
10746 case 2:
10747 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
10748 case 3:
10749 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
10750 case 4:
10751 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
10752 case 5:
10753 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
10754 case 6:
10755 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
10756 case 7:
10757 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
10758 return IEMOP_RAISE_INVALID_OPCODE();
10759 }
10760 AssertFailedReturn(VERR_IEM_IPE_3);
10761}
10762
10763
10764
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte; prefix bytes (segment overrides, lock,
 * rep, operand/address size) and the group/escape bytes each have their own
 * handler which continues decoding.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
10832
10833
10834/** @} */
10835
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette