VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65761

Last change on this file since 65761 was 65761, checked in by vboxsync, 8 years ago

IEM: VEX decoding updates.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 363.3 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65761 2017-02-13 12:15:26Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25
26/** @name One byte opcodes.
27 *
28 * @{
29 */
30
31/** Opcode 0x00. */
32FNIEMOP_DEF(iemOp_add_Eb_Gb)
33{
34 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
35 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
36}
37
38
39/** Opcode 0x01. */
40FNIEMOP_DEF(iemOp_add_Ev_Gv)
41{
42 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
43 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
44}
45
46
47/** Opcode 0x02. */
48FNIEMOP_DEF(iemOp_add_Gb_Eb)
49{
50 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
51 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
52}
53
54
55/** Opcode 0x03. */
56FNIEMOP_DEF(iemOp_add_Gv_Ev)
57{
58 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
59 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
60}
61
62
63/** Opcode 0x04. */
64FNIEMOP_DEF(iemOp_add_Al_Ib)
65{
66 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
67 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
68}
69
70
71/** Opcode 0x05. */
72FNIEMOP_DEF(iemOp_add_eAX_Iz)
73{
74 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
75 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
76}
77
78
79/** Opcode 0x06. */
80FNIEMOP_DEF(iemOp_push_ES)
81{
82 IEMOP_MNEMONIC(push_es, "push es");
83 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
84}
85
86
87/** Opcode 0x07. */
88FNIEMOP_DEF(iemOp_pop_ES)
89{
90 IEMOP_MNEMONIC(pop_es, "pop es");
91 IEMOP_HLP_NO_64BIT();
92 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
93 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
94}
95
96
97/** Opcode 0x08. */
98FNIEMOP_DEF(iemOp_or_Eb_Gb)
99{
100 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
101 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
102 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
103}
104
105
106/** Opcode 0x09. */
107FNIEMOP_DEF(iemOp_or_Ev_Gv)
108{
109 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
110 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
111 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
112}
113
114
115/** Opcode 0x0a. */
116FNIEMOP_DEF(iemOp_or_Gb_Eb)
117{
118 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
119 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
120 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
121}
122
123
124/** Opcode 0x0b. */
125FNIEMOP_DEF(iemOp_or_Gv_Ev)
126{
127 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
128 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
129 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
130}
131
132
133/** Opcode 0x0c. */
134FNIEMOP_DEF(iemOp_or_Al_Ib)
135{
136 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
137 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
138 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
139}
140
141
142/** Opcode 0x0d. */
143FNIEMOP_DEF(iemOp_or_eAX_Iz)
144{
145 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
146 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
147 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
148}
149
150
151/** Opcode 0x0e. */
152FNIEMOP_DEF(iemOp_push_CS)
153{
154 IEMOP_MNEMONIC(push_cs, "push cs");
155 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
156}
157
158
159/** Opcode 0x0f. */
160FNIEMOP_DEF(iemOp_2byteEscape)
161{
162#ifdef VBOX_STRICT
163 static bool s_fTested = false;
164 if (RT_LIKELY(s_fTested)) { /* likely */ }
165 else
166 {
167 s_fTested = true;
168 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
169 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
170 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
171 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
172 }
173#endif
174
175 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
176
177 /** @todo PUSH CS on 8086, undefined on 80186. */
178 IEMOP_HLP_MIN_286();
179 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
180}
181
182/** Opcode 0x10. */
183FNIEMOP_DEF(iemOp_adc_Eb_Gb)
184{
185 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
186 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
187}
188
189
190/** Opcode 0x11. */
191FNIEMOP_DEF(iemOp_adc_Ev_Gv)
192{
193 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
194 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
195}
196
197
198/** Opcode 0x12. */
199FNIEMOP_DEF(iemOp_adc_Gb_Eb)
200{
201 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
202 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
203}
204
205
206/** Opcode 0x13. */
207FNIEMOP_DEF(iemOp_adc_Gv_Ev)
208{
209 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
210 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
211}
212
213
214/** Opcode 0x14. */
215FNIEMOP_DEF(iemOp_adc_Al_Ib)
216{
217 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
218 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
219}
220
221
222/** Opcode 0x15. */
223FNIEMOP_DEF(iemOp_adc_eAX_Iz)
224{
225 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
226 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
227}
228
229
230/** Opcode 0x16. */
231FNIEMOP_DEF(iemOp_push_SS)
232{
233 IEMOP_MNEMONIC(push_ss, "push ss");
234 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
235}
236
237
238/** Opcode 0x17. */
239FNIEMOP_DEF(iemOp_pop_SS)
240{
241 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
243 IEMOP_HLP_NO_64BIT();
244 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
245}
246
247
248/** Opcode 0x18. */
249FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
250{
251 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
252 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
253}
254
255
256/** Opcode 0x19. */
257FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
258{
259 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
260 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
261}
262
263
264/** Opcode 0x1a. */
265FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
266{
267 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
268 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
269}
270
271
272/** Opcode 0x1b. */
273FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
274{
275 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
276 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
277}
278
279
280/** Opcode 0x1c. */
281FNIEMOP_DEF(iemOp_sbb_Al_Ib)
282{
283 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
284 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
285}
286
287
288/** Opcode 0x1d. */
289FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
290{
291 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
292 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
293}
294
295
296/** Opcode 0x1e. */
297FNIEMOP_DEF(iemOp_push_DS)
298{
299 IEMOP_MNEMONIC(push_ds, "push ds");
300 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
301}
302
303
304/** Opcode 0x1f. */
305FNIEMOP_DEF(iemOp_pop_DS)
306{
307 IEMOP_MNEMONIC(pop_ds, "pop ds");
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
309 IEMOP_HLP_NO_64BIT();
310 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
311}
312
313
314/** Opcode 0x20. */
315FNIEMOP_DEF(iemOp_and_Eb_Gb)
316{
317 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
318 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
319 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
320}
321
322
323/** Opcode 0x21. */
324FNIEMOP_DEF(iemOp_and_Ev_Gv)
325{
326 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
327 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
328 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
329}
330
331
332/** Opcode 0x22. */
333FNIEMOP_DEF(iemOp_and_Gb_Eb)
334{
335 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
336 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
337 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
338}
339
340
341/** Opcode 0x23. */
342FNIEMOP_DEF(iemOp_and_Gv_Ev)
343{
344 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
345 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
346 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
347}
348
349
350/** Opcode 0x24. */
351FNIEMOP_DEF(iemOp_and_Al_Ib)
352{
353 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
354 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
355 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
356}
357
358
359/** Opcode 0x25. */
360FNIEMOP_DEF(iemOp_and_eAX_Iz)
361{
362 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
363 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
364 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
365}
366
367
368/** Opcode 0x26. */
369FNIEMOP_DEF(iemOp_seg_ES)
370{
371 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
372 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
373 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
374
375 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
376 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
377}
378
379
380/** Opcode 0x27. */
381FNIEMOP_DEF(iemOp_daa)
382{
383 IEMOP_MNEMONIC(daa_AL, "daa AL");
384 IEMOP_HLP_NO_64BIT();
385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
386 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
387 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
388}
389
390
391/** Opcode 0x28. */
392FNIEMOP_DEF(iemOp_sub_Eb_Gb)
393{
394 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
395 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
396}
397
398
399/** Opcode 0x29. */
400FNIEMOP_DEF(iemOp_sub_Ev_Gv)
401{
402 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
403 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
404}
405
406
407/** Opcode 0x2a. */
408FNIEMOP_DEF(iemOp_sub_Gb_Eb)
409{
410 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
411 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
412}
413
414
415/** Opcode 0x2b. */
416FNIEMOP_DEF(iemOp_sub_Gv_Ev)
417{
418 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
419 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
420}
421
422
423/** Opcode 0x2c. */
424FNIEMOP_DEF(iemOp_sub_Al_Ib)
425{
426 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
427 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
428}
429
430
431/** Opcode 0x2d. */
432FNIEMOP_DEF(iemOp_sub_eAX_Iz)
433{
434 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
435 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
436}
437
438
439/** Opcode 0x2e. */
440FNIEMOP_DEF(iemOp_seg_CS)
441{
442 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
443 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
444 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
445
446 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
447 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
448}
449
450
451/** Opcode 0x2f. */
452FNIEMOP_DEF(iemOp_das)
453{
454 IEMOP_MNEMONIC(das_AL, "das AL");
455 IEMOP_HLP_NO_64BIT();
456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
457 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
458 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
459}
460
461
462/** Opcode 0x30. */
463FNIEMOP_DEF(iemOp_xor_Eb_Gb)
464{
465 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
466 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
467 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
468}
469
470
471/** Opcode 0x31. */
472FNIEMOP_DEF(iemOp_xor_Ev_Gv)
473{
474 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
475 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
476 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
477}
478
479
480/** Opcode 0x32. */
481FNIEMOP_DEF(iemOp_xor_Gb_Eb)
482{
483 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
484 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
485 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
486}
487
488
489/** Opcode 0x33. */
490FNIEMOP_DEF(iemOp_xor_Gv_Ev)
491{
492 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
493 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
494 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
495}
496
497
498/** Opcode 0x34. */
499FNIEMOP_DEF(iemOp_xor_Al_Ib)
500{
501 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
502 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
503 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
504}
505
506
507/** Opcode 0x35. */
508FNIEMOP_DEF(iemOp_xor_eAX_Iz)
509{
510 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
511 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
512 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
513}
514
515
516/** Opcode 0x36. */
517FNIEMOP_DEF(iemOp_seg_SS)
518{
519 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
520 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
521 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
522
523 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
524 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
525}
526
527
528/** Opcode 0x37. */
529FNIEMOP_STUB(iemOp_aaa);
530
531
532/** Opcode 0x38. */
533FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
534{
535 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
537}
538
539
540/** Opcode 0x39. */
541FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
542{
543 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
544 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
545}
546
547
548/** Opcode 0x3a. */
549FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
550{
551 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
553}
554
555
556/** Opcode 0x3b. */
557FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
558{
559 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
560 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
561}
562
563
564/** Opcode 0x3c. */
565FNIEMOP_DEF(iemOp_cmp_Al_Ib)
566{
567 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
568 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
569}
570
571
572/** Opcode 0x3d. */
573FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
574{
575 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
576 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
577}
578
579
580/** Opcode 0x3e. */
581FNIEMOP_DEF(iemOp_seg_DS)
582{
583 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
584 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
585 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
586
587 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
588 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
589}
590
591
592/** Opcode 0x3f. */
593FNIEMOP_STUB(iemOp_aas);
594
595/**
596 * Common 'inc/dec/not/neg register' helper.
597 */
598FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
599{
600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
601 switch (pVCpu->iem.s.enmEffOpSize)
602 {
603 case IEMMODE_16BIT:
604 IEM_MC_BEGIN(2, 0);
605 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
606 IEM_MC_ARG(uint32_t *, pEFlags, 1);
607 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
608 IEM_MC_REF_EFLAGS(pEFlags);
609 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
610 IEM_MC_ADVANCE_RIP();
611 IEM_MC_END();
612 return VINF_SUCCESS;
613
614 case IEMMODE_32BIT:
615 IEM_MC_BEGIN(2, 0);
616 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
617 IEM_MC_ARG(uint32_t *, pEFlags, 1);
618 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
619 IEM_MC_REF_EFLAGS(pEFlags);
620 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
621 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
622 IEM_MC_ADVANCE_RIP();
623 IEM_MC_END();
624 return VINF_SUCCESS;
625
626 case IEMMODE_64BIT:
627 IEM_MC_BEGIN(2, 0);
628 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
629 IEM_MC_ARG(uint32_t *, pEFlags, 1);
630 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
631 IEM_MC_REF_EFLAGS(pEFlags);
632 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
633 IEM_MC_ADVANCE_RIP();
634 IEM_MC_END();
635 return VINF_SUCCESS;
636 }
637 return VINF_SUCCESS;
638}
639
640
641/** Opcode 0x40. */
642FNIEMOP_DEF(iemOp_inc_eAX)
643{
644 /*
645 * This is a REX prefix in 64-bit mode.
646 */
647 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
648 {
649 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
650 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
651
652 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
653 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
654 }
655
656 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
657 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
658}
659
660
661/** Opcode 0x41. */
662FNIEMOP_DEF(iemOp_inc_eCX)
663{
664 /*
665 * This is a REX prefix in 64-bit mode.
666 */
667 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
668 {
669 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
670 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
671 pVCpu->iem.s.uRexB = 1 << 3;
672
673 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
674 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
675 }
676
677 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
678 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
679}
680
681
682/** Opcode 0x42. */
683FNIEMOP_DEF(iemOp_inc_eDX)
684{
685 /*
686 * This is a REX prefix in 64-bit mode.
687 */
688 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
689 {
690 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
691 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
692 pVCpu->iem.s.uRexIndex = 1 << 3;
693
694 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
695 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
696 }
697
698 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
699 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
700}
701
702
703
704/** Opcode 0x43. */
705FNIEMOP_DEF(iemOp_inc_eBX)
706{
707 /*
708 * This is a REX prefix in 64-bit mode.
709 */
710 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
711 {
712 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
713 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
714 pVCpu->iem.s.uRexB = 1 << 3;
715 pVCpu->iem.s.uRexIndex = 1 << 3;
716
717 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
718 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
719 }
720
721 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
722 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
723}
724
725
726/** Opcode 0x44. */
727FNIEMOP_DEF(iemOp_inc_eSP)
728{
729 /*
730 * This is a REX prefix in 64-bit mode.
731 */
732 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
733 {
734 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
735 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
736 pVCpu->iem.s.uRexReg = 1 << 3;
737
738 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
739 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
740 }
741
742 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
743 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
744}
745
746
747/** Opcode 0x45. */
748FNIEMOP_DEF(iemOp_inc_eBP)
749{
750 /*
751 * This is a REX prefix in 64-bit mode.
752 */
753 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
754 {
755 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
756 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
757 pVCpu->iem.s.uRexReg = 1 << 3;
758 pVCpu->iem.s.uRexB = 1 << 3;
759
760 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
761 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
762 }
763
764 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
765 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
766}
767
768
769/** Opcode 0x46. */
770FNIEMOP_DEF(iemOp_inc_eSI)
771{
772 /*
773 * This is a REX prefix in 64-bit mode.
774 */
775 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
776 {
777 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
778 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
779 pVCpu->iem.s.uRexReg = 1 << 3;
780 pVCpu->iem.s.uRexIndex = 1 << 3;
781
782 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
783 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
784 }
785
786 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
787 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
788}
789
790
791/** Opcode 0x47. */
792FNIEMOP_DEF(iemOp_inc_eDI)
793{
794 /*
795 * This is a REX prefix in 64-bit mode.
796 */
797 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
798 {
799 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
800 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
801 pVCpu->iem.s.uRexReg = 1 << 3;
802 pVCpu->iem.s.uRexB = 1 << 3;
803 pVCpu->iem.s.uRexIndex = 1 << 3;
804
805 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
806 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
807 }
808
809 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
810 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
811}
812
813
814/** Opcode 0x48. */
815FNIEMOP_DEF(iemOp_dec_eAX)
816{
817 /*
818 * This is a REX prefix in 64-bit mode.
819 */
820 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
821 {
822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
823 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
824 iemRecalEffOpSize(pVCpu);
825
826 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
827 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
828 }
829
830 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
831 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
832}
833
834
835/** Opcode 0x49. */
836FNIEMOP_DEF(iemOp_dec_eCX)
837{
838 /*
839 * This is a REX prefix in 64-bit mode.
840 */
841 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
842 {
843 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
844 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
845 pVCpu->iem.s.uRexB = 1 << 3;
846 iemRecalEffOpSize(pVCpu);
847
848 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
849 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
850 }
851
852 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
853 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
854}
855
856
857/** Opcode 0x4a. */
858FNIEMOP_DEF(iemOp_dec_eDX)
859{
860 /*
861 * This is a REX prefix in 64-bit mode.
862 */
863 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
864 {
865 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
866 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
867 pVCpu->iem.s.uRexIndex = 1 << 3;
868 iemRecalEffOpSize(pVCpu);
869
870 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
871 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
872 }
873
874 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
875 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
876}
877
878
879/** Opcode 0x4b. */
880FNIEMOP_DEF(iemOp_dec_eBX)
881{
882 /*
883 * This is a REX prefix in 64-bit mode.
884 */
885 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
886 {
887 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
888 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
889 pVCpu->iem.s.uRexB = 1 << 3;
890 pVCpu->iem.s.uRexIndex = 1 << 3;
891 iemRecalEffOpSize(pVCpu);
892
893 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
894 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
895 }
896
897 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
898 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
899}
900
901
902/** Opcode 0x4c. */
903FNIEMOP_DEF(iemOp_dec_eSP)
904{
905 /*
906 * This is a REX prefix in 64-bit mode.
907 */
908 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
909 {
910 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
911 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
912 pVCpu->iem.s.uRexReg = 1 << 3;
913 iemRecalEffOpSize(pVCpu);
914
915 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
916 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
917 }
918
919 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
920 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
921}
922
923
924/** Opcode 0x4d. */
925FNIEMOP_DEF(iemOp_dec_eBP)
926{
927 /*
928 * This is a REX prefix in 64-bit mode.
929 */
930 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
931 {
932 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
933 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
934 pVCpu->iem.s.uRexReg = 1 << 3;
935 pVCpu->iem.s.uRexB = 1 << 3;
936 iemRecalEffOpSize(pVCpu);
937
938 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
939 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
940 }
941
942 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
943 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
944}
945
946
947/** Opcode 0x4e. */
948FNIEMOP_DEF(iemOp_dec_eSI)
949{
950 /*
951 * This is a REX prefix in 64-bit mode.
952 */
953 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
954 {
955 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
956 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
957 pVCpu->iem.s.uRexReg = 1 << 3;
958 pVCpu->iem.s.uRexIndex = 1 << 3;
959 iemRecalEffOpSize(pVCpu);
960
961 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
962 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
963 }
964
965 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
966 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
967}
968
969
970/** Opcode 0x4f. */
971FNIEMOP_DEF(iemOp_dec_eDI)
972{
973 /*
974 * This is a REX prefix in 64-bit mode.
975 */
976 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
977 {
978 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
979 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
980 pVCpu->iem.s.uRexReg = 1 << 3;
981 pVCpu->iem.s.uRexB = 1 << 3;
982 pVCpu->iem.s.uRexIndex = 1 << 3;
983 iemRecalEffOpSize(pVCpu);
984
985 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
986 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
987 }
988
989 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
990 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
991}
992
993
994/**
995 * Common 'push register' helper.
996 */
997FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
998{
999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1000 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1001 {
1002 iReg |= pVCpu->iem.s.uRexB;
1003 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1004 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1005 }
1006
1007 switch (pVCpu->iem.s.enmEffOpSize)
1008 {
1009 case IEMMODE_16BIT:
1010 IEM_MC_BEGIN(0, 1);
1011 IEM_MC_LOCAL(uint16_t, u16Value);
1012 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1013 IEM_MC_PUSH_U16(u16Value);
1014 IEM_MC_ADVANCE_RIP();
1015 IEM_MC_END();
1016 break;
1017
1018 case IEMMODE_32BIT:
1019 IEM_MC_BEGIN(0, 1);
1020 IEM_MC_LOCAL(uint32_t, u32Value);
1021 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1022 IEM_MC_PUSH_U32(u32Value);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 break;
1026
1027 case IEMMODE_64BIT:
1028 IEM_MC_BEGIN(0, 1);
1029 IEM_MC_LOCAL(uint64_t, u64Value);
1030 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1031 IEM_MC_PUSH_U64(u64Value);
1032 IEM_MC_ADVANCE_RIP();
1033 IEM_MC_END();
1034 break;
1035 }
1036
1037 return VINF_SUCCESS;
1038}
1039
1040
1041/** Opcode 0x50. */
1042FNIEMOP_DEF(iemOp_push_eAX)
1043{
1044 IEMOP_MNEMONIC(push_rAX, "push rAX");
1045 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1046}
1047
1048
1049/** Opcode 0x51. */
1050FNIEMOP_DEF(iemOp_push_eCX)
1051{
1052 IEMOP_MNEMONIC(push_rCX, "push rCX");
1053 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1054}
1055
1056
1057/** Opcode 0x52. */
1058FNIEMOP_DEF(iemOp_push_eDX)
1059{
1060 IEMOP_MNEMONIC(push_rDX, "push rDX");
1061 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1062}
1063
1064
1065/** Opcode 0x53. */
1066FNIEMOP_DEF(iemOp_push_eBX)
1067{
1068 IEMOP_MNEMONIC(push_rBX, "push rBX");
1069 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1070}
1071
1072
1073/** Opcode 0x54. */
1074FNIEMOP_DEF(iemOp_push_eSP)
1075{
1076 IEMOP_MNEMONIC(push_rSP, "push rSP");
1077 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1078 {
1079 IEM_MC_BEGIN(0, 1);
1080 IEM_MC_LOCAL(uint16_t, u16Value);
1081 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1082 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1083 IEM_MC_PUSH_U16(u16Value);
1084 IEM_MC_ADVANCE_RIP();
1085 IEM_MC_END();
1086 }
1087 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1088}
1089
1090
1091/** Opcode 0x55. */
1092FNIEMOP_DEF(iemOp_push_eBP)
1093{
1094 IEMOP_MNEMONIC(push_rBP, "push rBP");
1095 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1096}
1097
1098
1099/** Opcode 0x56. */
1100FNIEMOP_DEF(iemOp_push_eSI)
1101{
1102 IEMOP_MNEMONIC(push_rSI, "push rSI");
1103 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1104}
1105
1106
1107/** Opcode 0x57. */
1108FNIEMOP_DEF(iemOp_push_eDI)
1109{
1110 IEMOP_MNEMONIC(push_rDI, "push rDI");
1111 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1112}
1113
1114
1115/**
1116 * Common 'pop register' helper.
1117 */
1118FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1119{
1120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1121 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1122 {
1123 iReg |= pVCpu->iem.s.uRexB;
1124 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1125 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1126 }
1127
1128 switch (pVCpu->iem.s.enmEffOpSize)
1129 {
1130 case IEMMODE_16BIT:
1131 IEM_MC_BEGIN(0, 1);
1132 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1133 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1134 IEM_MC_POP_U16(pu16Dst);
1135 IEM_MC_ADVANCE_RIP();
1136 IEM_MC_END();
1137 break;
1138
1139 case IEMMODE_32BIT:
1140 IEM_MC_BEGIN(0, 1);
1141 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1142 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1143 IEM_MC_POP_U32(pu32Dst);
1144 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1145 IEM_MC_ADVANCE_RIP();
1146 IEM_MC_END();
1147 break;
1148
1149 case IEMMODE_64BIT:
1150 IEM_MC_BEGIN(0, 1);
1151 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1152 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1153 IEM_MC_POP_U64(pu64Dst);
1154 IEM_MC_ADVANCE_RIP();
1155 IEM_MC_END();
1156 break;
1157 }
1158
1159 return VINF_SUCCESS;
1160}
1161
1162
1163/** Opcode 0x58. */
1164FNIEMOP_DEF(iemOp_pop_eAX)
1165{
1166 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1167 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1168}
1169
1170
1171/** Opcode 0x59. */
1172FNIEMOP_DEF(iemOp_pop_eCX)
1173{
1174 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1175 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1176}
1177
1178
1179/** Opcode 0x5a. */
1180FNIEMOP_DEF(iemOp_pop_eDX)
1181{
1182 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1183 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1184}
1185
1186
1187/** Opcode 0x5b. */
1188FNIEMOP_DEF(iemOp_pop_eBX)
1189{
1190 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1191 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1192}
1193
1194
/** Opcode 0x5c - pop rSP.
 *
 * Special-cased because popping into the stack pointer itself cannot go
 * through the generic pointer-reference worker: the value read from the
 * stack must be stored into (R|E)SP *after* the pop has adjusted it, so a
 * local temporary is used instead.
 *
 * In 64-bit mode, REX.B redirects the destination to r12 (handled by the
 * common worker), otherwise the default operand size is forced to 64-bit
 * (66h prefix selects 16-bit; there is no 32-bit pop in long mode).
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1242
1243
/** Opcode 0x5d - pop rBP.
 * Pops a stack value of the effective operand size into rBP via the
 * common pop-into-GPR worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1250
1251
/** Opcode 0x5e - pop rSI.
 * Pops a stack value of the effective operand size into rSI via the
 * common pop-into-GPR worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1258
1259
/** Opcode 0x5f - pop rDI.
 * Pops a stack value of the effective operand size into rDI via the
 * common pop-into-GPR worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1266
1267
/** Opcode 0x60 - pusha/pushad.
 * Pushes all eight general registers; 186+ only and invalid in 64-bit
 * mode.  Defers to the 16- or 32-bit C implementation depending on the
 * effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1279
1280
/** Opcode 0x61 - popa/popad.
 * Pops all eight general registers; 186+ only and invalid in 64-bit
 * mode.  Defers to the 16- or 32-bit C implementation depending on the
 * effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
1292
1293
/** Opcode 0x62 - bound Gv,Ma / EVEX escape.
 * Decoder stub - not implemented yet.  NOTE(review): presumably this is
 * BOUND in legacy modes (186+, hence the commented-out MIN check) and the
 * EVEX prefix byte on AVX-512 capable CPUs - confirm when implementing. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
1297
1298
/** Opcode 0x63 - non-64-bit modes (in 64-bit mode 0x63 is movsxd, see below).
 *
 * ARPL Ew,Gw - adjusts the RPL field of the destination selector word and
 * sets ZF accordingly, via the iemAImpl_arpl assembly worker.  286+ and not
 * valid in real or V86 mode.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest register. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX in the modes where ARPL exists, so no uRexB/uRexReg here. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: read-modify-write mapping with EFLAGS handled
           through a local copy that is committed after the memory commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1348
1349
/** Opcode 0x63 - 64-bit mode (movsxd Gv,Ev).
 *
 * Sign-extends a 32-bit register or memory operand into a 64-bit register.
 *
 * @note This is a weird one. It works like a regular move instruction if
 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: fetch 32-bit source sign-extended to 64-bit,
         * store the full 64-bit result.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: 32-bit read, sign-extended.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1391
1392
/** Opcode 0x64 - FS segment override prefix (386+).
 * Records the prefix, sets FS as the effective segment and decodes the
 * next opcode byte.  Any REX prefix seen before this byte is dropped. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1405
1406
/** Opcode 0x65 - GS segment override prefix (386+).
 * Records the prefix, sets GS as the effective segment and decodes the
 * next opcode byte.  Any REX prefix seen before this byte is dropped. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1419
1420
/** Opcode 0x66 - operand-size override prefix (386+).
 * Records the prefix, recalculates the effective operand size and decodes
 * the next opcode byte.  Any REX prefix seen before this byte is dropped. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1438
1439
/** Opcode 0x67 - address-size override prefix (386+).
 * Records the prefix, flips the effective address mode relative to the
 * default one (16<->32, 64->32) and decodes the next opcode byte.  Any
 * REX prefix seen before this byte is dropped. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1458
1459
/** Opcode 0x68 - push Iz.
 *
 * Pushes an immediate of the effective operand size (186+).  In 64-bit
 * mode the default operand size is 64-bit and the 32-bit immediate is
 * sign-extended to 64 bits before being pushed.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1504
1505
/** Opcode 0x69 - imul Gv,Ev,Iz.
 *
 * Three-operand signed multiply (186+): Gv = Ev * Iz.  The result is
 * computed in a local temporary referenced as the destination argument of
 * the iemAImpl_imul_two_* worker and then stored to the Gv register.
 * SF, ZF, AF and PF are undefined after IMUL and verified as such.
 *
 * Note the decoding order in the memory forms: the effective address is
 * calculated first (with the immediate size as displacement hint), then
 * the immediate is fetched.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of trailing immediate */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of trailing immediate */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; 32-bit immediate sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 32-bit immediate sign-extended to 64 bits */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of trailing immediate */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
1665
1666
/** Opcode 0x6a - push Ib.
 *
 * Pushes a sign-extended 8-bit immediate at the effective operand size
 * (186+); in 64-bit mode the default operand size is 64-bit.  The int8_t
 * immediate is sign-extended by the implicit conversion at each
 * IEM_MC_PUSH_* call.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1693
1694
/** Opcode 0x6b - imul Gv,Ev,Ib.
 *
 * Three-operand signed multiply (186+) with a sign-extended 8-bit
 * immediate: Gv = Ev * Ib.  Structure matches the Iz form (opcode 0x69),
 * only the immediate fetch/extension differs.  SF, ZF, AF and PF are
 * undefined after IMUL and verified as such.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
1848
1849
/** Opcode 0x6c - ins Yb,DX (186+).
 * Byte string input from port DX; dispatches to the C implementation
 * matching the effective address mode, with a REP variant when a REPZ or
 * REPNZ prefix is present (both prefixes select the REP form). */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1878
1879
/** Opcode 0x6d - ins Yv,DX (186+).
 * Word/dword string input from port DX; dispatches on effective operand
 * size and address mode, with a REP variant when a REPZ/REPNZ prefix is
 * present.  A 64-bit operand size falls through to the 32-bit op workers
 * (there is no 64-bit port I/O). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1940
1941
/** Opcode 0x6e - outs DX,Yb (186+).
 * Byte string output to port DX; the source side honours segment
 * overrides, hence iEffSeg is passed to the C implementation.  REPZ or
 * REPNZ selects the REP form. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1970
1971
/** Opcode 0x6f - outs DX,Yv (186+).
 * Word/dword string output to port DX; dispatches on effective operand
 * size and address mode, passing iEffSeg since the source side honours
 * segment overrides.  A 64-bit operand size falls through to the 32-bit
 * op workers (there is no 64-bit port I/O). */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2032
2033
/** Opcode 0x70 - jo Jb.
 * Short relative jump taken when OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2051
2052
/** Opcode 0x71 - jno Jb.
 * Short relative jump taken when OF is clear (branches inverted relative
 * to jo). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2070
/** Opcode 0x72 - jc/jb/jnae Jb.
 * Short relative jump taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2088
2089
/** Opcode 0x73 - jnc/jnb/jae Jb.
 * Short relative jump taken when CF is clear. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2107
2108
/** Opcode 0x74 - je/jz Jb.
 * Short relative jump taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2126
2127
/** Opcode 0x75 - jne/jnz Jb.
 * Short relative jump taken when ZF is clear. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2145
2146
/** Opcode 0x76 - jbe/jna Jb.
 * Short relative jump taken when CF or ZF is set (unsigned below-or-equal). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2164
2165
/** Opcode 0x77 - ja/jnbe Jb.
 * Short relative jump taken when both CF and ZF are clear (unsigned above). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2183
2184
/** Opcode 0x78 - js Jb.
 * Short relative jump taken when SF is set. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2202
2203
/**
 * Opcode 0x79 - jns Jb.
 *
 * Short relative jump taken when SF=0.  Inverted IF: the SF-set arm
 * falls through, the else arm branches.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();       /* SF=1: condition false */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF=0: take the branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2221
2222
/**
 * Opcode 0x7a - jp Jb.
 *
 * Short relative jump taken when PF=1 (even parity).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* PF=1: take the branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2240
2241
/**
 * Opcode 0x7b - jnp Jb.
 *
 * Short relative jump taken when PF=0.  Inverted IF: the PF-set arm
 * falls through, the else arm branches.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();       /* PF=1: condition false */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* PF=0: take the branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2259
2260
/**
 * Opcode 0x7c - jl/jnge Jb.
 *
 * Short relative jump taken when SF != OF (signed less-than).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF != OF: take the branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2278
2279
/**
 * Opcode 0x7d - jnl/jge Jb.
 *
 * Short relative jump taken when SF == OF (signed greater-or-equal).
 * Inverted IF: the SF != OF arm falls through, the else arm branches.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* SF != OF: condition false */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF == OF: take the branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2297
2298
/**
 * Opcode 0x7e - jle/jng Jb.
 *
 * Short relative jump taken when ZF=1 or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF=1 or SF != OF: take the branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2316
2317
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Short relative jump taken when ZF=0 and SF == OF (signed greater-than).
 * Inverted IF relative to opcode 0x7e: the condition-set arm falls through.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* ZF=1 or SF != OF: condition false */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF=0 and SF == OF: take the branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2335
2336
/**
 * Opcode 0x80 - group 1, Eb,Ib.
 *
 * Byte-sized immediate form of the group-1 arithmetic/logic instructions
 * (add/or/adc/sbb/and/sub/xor/cmp), selected by the ModR/M reg field.
 * The actual worker is picked from g_apIemImplGrp1; CMP has no locked
 * variant (pfnLockedU8 == NULL), which doubles as the read-only marker
 * for the memory path.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation; only affects the mnemonic here. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP only reads the destination */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Trailing '1' = one immediate byte still follows the ModR/M bytes
           (needed for correct RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP => #UD */

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2406
2407
/**
 * Opcode 0x81 - group 1, Ev,Iz.
 *
 * Word/dword/qword group-1 arithmetic/logic with a full-size immediate
 * (Iz: 16-bit imm for 16-bit operand size, 32-bit otherwise; sign-extended
 * to 64 bits for 64-bit operand size).  Operation selected by ModR/M reg;
 * CMP (no locked worker) makes the memory operand read-only.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid with register destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP only reads the destination */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '2' = two immediate bytes follow (for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP => #UD */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero bits 63:32 in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP only reads the destination */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '4' = four immediate bytes follow. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz is a 32-bit immediate sign-extended to 64 bits here. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP only reads the destination */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still only 4 immediate bytes in 64-bit mode (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
2595
2596
/**
 * Opcode 0x82 - group 1, Eb,Ib (alias of 0x80).
 *
 * Undocumented alias of opcode 0x80; raises \#UD in 64-bit mode
 * (IEMOP_HLP_NO_64BIT), otherwise forwards to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
2603
2604
/**
 * Opcode 0x83 - group 1, Ev,Ib.
 *
 * Group-1 arithmetic/logic with a sign-extended 8-bit immediate applied
 * to a word/dword/qword destination.  Operation selected by ModR/M reg;
 * CMP (no locked worker) makes the memory operand read-only.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid with register destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the immediate to operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero bits 63:32 in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* The U16 locked pointer doubles as the "is this a writing op" flag
           for all operand sizes (all-or-none per table entry). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP only reads the destination */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '1' = one immediate byte still follows (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend to operand size */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP => #UD */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend to operand size */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend to operand size */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
2787
2788
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Byte TEST via the generic rm,r8 binary-operator helper; AF is
 * architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
2796
2797
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Word/dword/qword TEST via the generic rm,rv binary-operator helper;
 * AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
2805
2806
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Byte exchange.  Register/register is done with two fetches and two
 * stores; the memory form maps the memory byte R/W and calls the
 * (implicitly atomic) xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid for reg,reg */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both before storing so the swap works when both operands
           name the same register. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow */
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2854
2855
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Word/dword/qword exchange.  Register/register swaps via two temps;
 * the memory form maps the memory operand R/W and calls the xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid for reg,reg */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both before storing so the same-register case works. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                /* Note: the 32-bit GREG stores zero bits 63:32 in long mode. */
                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow */
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written via reference; clear bits 63:32 explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2977
2978
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Store a byte register into r/m8 (register or memory destination).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK MOV => #UD */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3018
3019
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Store a word/dword/qword register into r/m (register or memory
 * destination), switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK MOV => #UD */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3109
3110
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Load a byte register from r/m8 (register or memory source).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK MOV => #UD */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3148
3149
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Copies the word/dword/qword r/m operand (register or memory) into the
 * general register named by the ModR/M 'reg' field, honouring the effective
 * operand size and the REX.R/REX.B register extensions.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Note: no default case; enmEffOpSize can only take these three values. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         * The effective address is calculated before the done-decoding check
         * so any remaining displacement/SIB bytes are consumed first.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3239
3240
/**
 * Opcode 0x63 - mode-dependent dispatcher.
 *
 * Outside 64-bit mode 0x63 is ARPL.  In 64-bit mode it is MOVSXD; when the
 * effective operand size there is not 64-bit the instruction is dispatched
 * to the plain mov Gv,Ev handler instead (no sign extension needed).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
3250
3251
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Stores a segment register into a general register or memory.  The memory
 * form is always a 16-bit store; the register form respects the effective
 * operand size (upper bits zeroed, see comment below).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3325
3326
3327
3328
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Loads the calculated effective address (not the memory contents) into the
 * 'reg' general register, truncated to the effective operand size.  The
 * register form of ModR/M is invalid for LEA and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* all op sizes handled above */
}
3375
3376
/**
 * Opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register from a general register or memory (always a
 * 16-bit access).  Loading CS this way is invalid; the segment load itself
 * is deferred to iemCImpl_load_SReg which performs the descriptor checks.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3431
3432
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * Pops a value off the stack into a general register or memory operand.
 * Register targets share iemOpCommonPopGReg; memory targets are handled
 * inline here (interpreter-only) because Intel defines RSP as already
 * incremented when it participates in the effective address calculation.
 *
 * @param   bRm     The ModR/M byte, already fetched by the caller (Grp1A).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP: the last argument is the
       stack-item size added to rSP before the address calculation. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.
       A temporary rSP is used so nothing is committed unless both the stack
       pop and the memory store succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit rSP and advance RIP only on full success. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3527
3528
/**
 * Opcode 0x8f - group 1A dispatcher.
 *
 * /0 is pop Ev; /1 thru /7 are reserved by AMD for the XOP prefix (similar
 * to the three-byte VEX prefix) and currently raise \#UD here.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
3541
3542
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps rAX and the given general register at the effective operand size,
 * applying the REX.B extension to @a iReg.  Used by opcodes 0x90-0x97.
 *
 * @param   iReg    The low 3 bits of the register index (REX.B is OR'ed in
 *                  here).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB; /* extend to r8-r15 when REX.B is set */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3592
3593
3594/** Opcode 0x90. */
3595FNIEMOP_DEF(iemOp_nop)
3596{
3597 /* R8/R8D and RAX/EAX can be exchanged. */
3598 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
3599 {
3600 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
3601 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
3602 }
3603
3604 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3605 IEMOP_MNEMONIC(pause, "pause");
3606 else
3607 IEMOP_MNEMONIC(nop, "nop");
3608 IEM_MC_BEGIN(0, 0);
3609 IEM_MC_ADVANCE_RIP();
3610 IEM_MC_END();
3611 return VINF_SUCCESS;
3612}
3613
3614
/** Opcode 0x91 - xchg rCX,rAX (rCX may be r9 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
3621
3622
/** Opcode 0x92 - xchg rDX,rAX (rDX may be r10 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
3629
3630
/** Opcode 0x93 - xchg rBX,rAX (rBX may be r11 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
3637
3638
3639/** Opcode 0x94. */
3640FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
3641{
3642 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
3643 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
3644}
3645
3646
/** Opcode 0x95 - xchg rBP,rAX (rBP may be r13 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
3653
3654
/** Opcode 0x96 - xchg rSI,rAX (rSI may be r14 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
3661
3662
/** Opcode 0x97 - xchg rDI,rAX (rDI may be r15 via REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
3669
3670
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign-extends the lower half of rAX into its upper half, by operand size:
 * AL->AX (cbw), AX->EAX (cwde), EAX->RAX (cdqe).  Implemented by testing
 * the sign bit and OR-ing/AND-ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {  /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3716
3717
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Sign-extends rAX into rDX by operand size: AX->DX:AX (cwd),
 * EAX->EDX:EAX (cdq), RAX->RDX:RAX (cqo).  rDX is set to all-ones or zero
 * depending on the sign bit of rAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {  /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3763
3764
/**
 * Opcode 0x9a - call Ap (far call, absolute ptr16:16/ptr16:32).
 *
 * Invalid in 64-bit mode.  The offset width follows the effective operand
 * size; the actual far-call semantics are deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
3781
3782
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Checks for a pending device-not-available condition and pending x87
 * exceptions, otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3796
3797
/**
 * Opcode 0x9c - pushf Fv.
 *
 * Pushes the flags register; defaults to 64-bit operand size in long mode
 * and defers the work (incl. IOPL/VM checks) to iemCImpl_pushf.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
3805
3806
/**
 * Opcode 0x9d - popf Fv.
 *
 * Pops into the flags register; defaults to 64-bit operand size in long
 * mode and defers the work (incl. IOPL/VM checks) to iemCImpl_popf.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
3814
3815
/**
 * Opcode 0x9e - sahf.
 *
 * Stores AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF, with bit 1 forced
 * to one).  Invalid in 64-bit mode unless the CPU reports the LAHF/SAHF
 * CPUID capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the arithmetic flags from AH... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...clear the low byte of EFLAGS and merge, forcing reserved bit 1. */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3838
3839
/**
 * Opcode 0x9f - lahf.
 *
 * Loads the low byte of EFLAGS into AH.  Invalid in 64-bit mode unless the
 * CPU reports the LAHF/SAHF CPUID capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3856
3857
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The immediate offset width follows the effective address size (16/32/64
 * bits, zero-extended to 64).  Note this also performs the done-decoding /
 * no-lock-prefix check, so callers must not repeat it.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
3882
3883/** Opcode 0xa0. */
3884FNIEMOP_DEF(iemOp_mov_Al_Ob)
3885{
3886 /*
3887 * Get the offset and fend of lock prefixes.
3888 */
3889 RTGCPTR GCPtrMemOff;
3890 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3891
3892 /*
3893 * Fetch AL.
3894 */
3895 IEM_MC_BEGIN(0,1);
3896 IEM_MC_LOCAL(uint8_t, u8Tmp);
3897 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
3898 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
3899 IEM_MC_ADVANCE_RIP();
3900 IEM_MC_END();
3901 return VINF_SUCCESS;
3902}
3903
3904
/**
 * Opcode 0xa1 - mov rAX,Ov.
 *
 * Loads rAX (at the effective operand size) from the memory location given
 * by the moffs immediate.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR  GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3950
3951
3952/** Opcode 0xa2. */
3953FNIEMOP_DEF(iemOp_mov_Ob_AL)
3954{
3955 /*
3956 * Get the offset and fend of lock prefixes.
3957 */
3958 RTGCPTR GCPtrMemOff;
3959 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3960
3961 /*
3962 * Store AL.
3963 */
3964 IEM_MC_BEGIN(0,1);
3965 IEM_MC_LOCAL(uint8_t, u8Tmp);
3966 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
3967 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
3968 IEM_MC_ADVANCE_RIP();
3969 IEM_MC_END();
3970 return VINF_SUCCESS;
3971}
3972
3973
3974/** Opcode 0xa3. */
3975FNIEMOP_DEF(iemOp_mov_Ov_rAX)
3976{
3977 /*
3978 * Get the offset and fend of lock prefixes.
3979 */
3980 RTGCPTR GCPtrMemOff;
3981 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3982
3983 /*
3984 * Store rAX.
3985 */
3986 switch (pVCpu->iem.s.enmEffOpSize)
3987 {
3988 case IEMMODE_16BIT:
3989 IEM_MC_BEGIN(0,1);
3990 IEM_MC_LOCAL(uint16_t, u16Tmp);
3991 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
3992 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
3993 IEM_MC_ADVANCE_RIP();
3994 IEM_MC_END();
3995 return VINF_SUCCESS;
3996
3997 case IEMMODE_32BIT:
3998 IEM_MC_BEGIN(0,1);
3999 IEM_MC_LOCAL(uint32_t, u32Tmp);
4000 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4001 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4002 IEM_MC_ADVANCE_RIP();
4003 IEM_MC_END();
4004 return VINF_SUCCESS;
4005
4006 case IEMMODE_64BIT:
4007 IEM_MC_BEGIN(0,1);
4008 IEM_MC_LOCAL(uint64_t, u64Tmp);
4009 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4010 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4011 IEM_MC_ADVANCE_RIP();
4012 IEM_MC_END();
4013 return VINF_SUCCESS;
4014
4015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4016 }
4017}
4018
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS iteration: load from [seg:rSI], store to [ES:rDI], then
 * advance or retreat rSI/rDI by the element size depending on EFLAGS.DF.
 * rSI/rDI are fetched zero-extended to 64 bits for the address calculation.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4037
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * Byte string move.  With a REP/REPNE prefix the whole loop is deferred to
 * the C implementation (selected by address size); otherwise a single
 * iteration is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4071
4072
/**
 * Opcode 0xa5 - movsw/movsd/movsq Xv,Yv.
 *
 * Word/dword/qword string move.  With a REP/REPNE prefix the loop is
 * deferred to the C implementation picked by operand and address size;
 * otherwise a single iteration is emitted via IEM_MOVS_CASE.  64-bit
 * operand size with 16-bit addressing cannot be encoded.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_MOVS_CASE
4157
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS iteration: loads from [seg:rSI] and [ES:rDI], compares
 * them via the cmp arithmetic helper (updating EFLAGS only), then advances
 * or retreats rSI/rDI by the element size depending on EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

4185/** Opcode 0xa6. */
/**
 * Opcode 0xa6 - cmpsb Xb,Yb.
 *
 * Byte string compare.  REPE and REPNE select different C implementations
 * (terminating on ZF clear resp. set); without a repeat prefix a single
 * iteration is emitted via IEM_CMPS_CASE.  Note that 0xa6 is the opcode
 * preceding this handler's definition (handler name in the comment above
 * the function).
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4230
4231
4232/** Opcode 0xa7. */
4233FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4234{
4235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4236
4237 /*
4238 * Use the C implementation if a repeat prefix is encountered.
4239 */
4240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4241 {
4242 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4243 switch (pVCpu->iem.s.enmEffOpSize)
4244 {
4245 case IEMMODE_16BIT:
4246 switch (pVCpu->iem.s.enmEffAddrMode)
4247 {
4248 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4249 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4250 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4252 }
4253 break;
4254 case IEMMODE_32BIT:
4255 switch (pVCpu->iem.s.enmEffAddrMode)
4256 {
4257 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4258 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4259 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4261 }
4262 case IEMMODE_64BIT:
4263 switch (pVCpu->iem.s.enmEffAddrMode)
4264 {
4265 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4266 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4267 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4269 }
4270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4271 }
4272 }
4273
4274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4275 {
4276 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4277 switch (pVCpu->iem.s.enmEffOpSize)
4278 {
4279 case IEMMODE_16BIT:
4280 switch (pVCpu->iem.s.enmEffAddrMode)
4281 {
4282 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4283 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4284 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4286 }
4287 break;
4288 case IEMMODE_32BIT:
4289 switch (pVCpu->iem.s.enmEffAddrMode)
4290 {
4291 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4292 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4293 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4295 }
4296 case IEMMODE_64BIT:
4297 switch (pVCpu->iem.s.enmEffAddrMode)
4298 {
4299 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4300 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4301 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4303 }
4304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4305 }
4306 }
4307
4308 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4309
4310 /*
4311 * Annoying double switch here.
4312 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4313 */
4314 switch (pVCpu->iem.s.enmEffOpSize)
4315 {
4316 case IEMMODE_16BIT:
4317 switch (pVCpu->iem.s.enmEffAddrMode)
4318 {
4319 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4320 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4321 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4323 }
4324 break;
4325
4326 case IEMMODE_32BIT:
4327 switch (pVCpu->iem.s.enmEffAddrMode)
4328 {
4329 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4330 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4331 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4333 }
4334 break;
4335
4336 case IEMMODE_64BIT:
4337 switch (pVCpu->iem.s.enmEffAddrMode)
4338 {
4339 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4340 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4341 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4343 }
4344 break;
4345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4346 }
4347 return VINF_SUCCESS;
4348
4349}
4350
4351#undef IEM_CMPS_CASE
4352
/**
 * Opcode 0xa8 - test al,Ib.
 * AF is architecturally undefined after TEST, hence the verification mask.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
4360
4361
/**
 * Opcode 0xa9 - test rAX,Iz (operand size selects ax/eax/rax and imm16/imm32).
 * AF is architecturally undefined after TEST, hence the verification mask.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
4369
4370
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to emit the IEM_MC
 * block for one (non-repeated) STOS iteration: stores xAX (width ValBits) to
 * [ES:xDI], then advances (DF=0) or rewinds (DF=1) xDI by the operand size.
 *
 * @param ValBits   Operand width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width in bits (16, 32 or 64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/**
 * Opcode 0xaa - stosb Yb,AL.
 *
 * REP (F2 and F3 are treated alike here - STOS has no REPE/REPNE semantics)
 * defers the whole operation to a C implementation selected by address size;
 * otherwise a single iteration is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4420
4421
4422/** Opcode 0xab. */
4423FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
4424{
4425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4426
4427 /*
4428 * Use the C implementation if a repeat prefix is encountered.
4429 */
4430 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4431 {
4432 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
4433 switch (pVCpu->iem.s.enmEffOpSize)
4434 {
4435 case IEMMODE_16BIT:
4436 switch (pVCpu->iem.s.enmEffAddrMode)
4437 {
4438 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
4439 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
4440 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
4441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4442 }
4443 break;
4444 case IEMMODE_32BIT:
4445 switch (pVCpu->iem.s.enmEffAddrMode)
4446 {
4447 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
4448 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
4449 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
4450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4451 }
4452 case IEMMODE_64BIT:
4453 switch (pVCpu->iem.s.enmEffAddrMode)
4454 {
4455 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
4456 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
4457 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
4458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4459 }
4460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4461 }
4462 }
4463 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
4464
4465 /*
4466 * Annoying double switch here.
4467 * Using ugly macro for implementing the cases, sharing it with stosb.
4468 */
4469 switch (pVCpu->iem.s.enmEffOpSize)
4470 {
4471 case IEMMODE_16BIT:
4472 switch (pVCpu->iem.s.enmEffAddrMode)
4473 {
4474 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
4475 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
4476 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
4477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4478 }
4479 break;
4480
4481 case IEMMODE_32BIT:
4482 switch (pVCpu->iem.s.enmEffAddrMode)
4483 {
4484 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
4485 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
4486 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
4487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4488 }
4489 break;
4490
4491 case IEMMODE_64BIT:
4492 switch (pVCpu->iem.s.enmEffAddrMode)
4493 {
4494 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4495 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
4496 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
4497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4498 }
4499 break;
4500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4501 }
4502 return VINF_SUCCESS;
4503}
4504
4505#undef IEM_STOS_CASE
4506
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit the IEM_MC
 * block for one (non-repeated) LODS iteration: loads xAX (width ValBits) from
 * [effective-seg:xSI], then advances (DF=0) or rewinds (DF=1) xSI by the
 * operand size.
 *
 * @param ValBits   Operand width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width in bits (16, 32 or 64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4522
/**
 * Opcode 0xac - lodsb AL,Xb.
 *
 * REP (F2 and F3 are treated alike - LODS has no REPE/REPNE semantics) defers
 * the whole operation to a C implementation selected by address size;
 * otherwise a single iteration is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.  (The old comment said
     * "stos[wdq]" - copy/paste left-over.)
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4556
4557
/**
 * Opcode 0xad - lods[wdq] rAX,Xv.
 *
 * REP (F2 and F3 are treated alike - LODS has no REPE/REPNE semantics) defers
 * the whole operation to a C implementation selected by operand and address
 * size; otherwise a single iteration is emitted via the IEM_LODS_CASE macro
 * shared with lodsb.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): missing 'break;' here - the 32-bit case falls into the 64-bit one.
               Unreachable in practice since every inner case returns, but inconsistent with the
               16-bit case above and warning-prone. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4640
4641#undef IEM_LODS_CASE
4642
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the IEM_MC
 * block for one (non-repeated) SCAS iteration: compares xAX against
 * [ES:xDI] using the shared CMP worker (EFLAGS only - xAX is not modified by
 * the CMP implementation), then advances (DF=0) or rewinds (DF=1) xDI by the
 * operand size.
 *
 * @param ValBits   Operand width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width in bits (16, 32 or 64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4664
4665/** Opcode 0xae. */
4666FNIEMOP_DEF(iemOp_scasb_AL_Xb)
4667{
4668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4669
4670 /*
4671 * Use the C implementation if a repeat prefix is encountered.
4672 */
4673 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4674 {
4675 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
4676 switch (pVCpu->iem.s.enmEffAddrMode)
4677 {
4678 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
4679 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
4680 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
4681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4682 }
4683 }
4684 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4685 {
4686 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
4687 switch (pVCpu->iem.s.enmEffAddrMode)
4688 {
4689 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
4690 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
4691 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
4692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4693 }
4694 }
4695 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
4696
4697 /*
4698 * Sharing case implementation with stos[wdq] below.
4699 */
4700 switch (pVCpu->iem.s.enmEffAddrMode)
4701 {
4702 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
4703 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
4704 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
4705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4706 }
4707 return VINF_SUCCESS;
4708}
4709
4710
/**
 * Opcode 0xaf - scas[wdq] rAX,Xv.
 *
 * With an F3 (REPE) or F2 (REPNE) prefix the repeated operation is deferred
 * to a C implementation selected by operand and address size; otherwise a
 * single iteration is emitted via the IEM_SCAS_CASE macro shared with scasb.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): missing 'break;' here - unreachable in practice since every inner
               case returns, but inconsistent with the 16-bit case above and warning-prone. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing is not encodable in 64-bit mode; the 67h prefix selects 32-bit there. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): missing 'break;' here too, see above. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4826
4827#undef IEM_SCAS_CASE
4828
4829/**
4830 * Common 'mov r8, imm8' helper.
4831 */
4832FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
4833{
4834 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836
4837 IEM_MC_BEGIN(0, 1);
4838 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
4839 IEM_MC_STORE_GREG_U8(iReg, u8Value);
4840 IEM_MC_ADVANCE_RIP();
4841 IEM_MC_END();
4842
4843 return VINF_SUCCESS;
4844}
4845
4846
4847/** Opcode 0xb0. */
4848FNIEMOP_DEF(iemOp_mov_AL_Ib)
4849{
4850 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
4851 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
4852}
4853
4854
4855/** Opcode 0xb1. */
4856FNIEMOP_DEF(iemOp_CL_Ib)
4857{
4858 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
4859 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
4860}
4861
4862
4863/** Opcode 0xb2. */
4864FNIEMOP_DEF(iemOp_DL_Ib)
4865{
4866 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
4867 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
4868}
4869
4870
4871/** Opcode 0xb3. */
4872FNIEMOP_DEF(iemOp_BL_Ib)
4873{
4874 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
4875 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
4876}
4877
4878
4879/** Opcode 0xb4. */
4880FNIEMOP_DEF(iemOp_mov_AH_Ib)
4881{
4882 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
4883 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
4884}
4885
4886
4887/** Opcode 0xb5. */
4888FNIEMOP_DEF(iemOp_CH_Ib)
4889{
4890 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
4891 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
4892}
4893
4894
4895/** Opcode 0xb6. */
4896FNIEMOP_DEF(iemOp_DH_Ib)
4897{
4898 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
4899 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
4900}
4901
4902
4903/** Opcode 0xb7. */
4904FNIEMOP_DEF(iemOp_BH_Ib)
4905{
4906 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
4907 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
4908}
4909
4910
4911/**
4912 * Common 'mov regX,immX' helper.
4913 */
4914FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
4915{
4916 switch (pVCpu->iem.s.enmEffOpSize)
4917 {
4918 case IEMMODE_16BIT:
4919 {
4920 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
4921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4922
4923 IEM_MC_BEGIN(0, 1);
4924 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
4925 IEM_MC_STORE_GREG_U16(iReg, u16Value);
4926 IEM_MC_ADVANCE_RIP();
4927 IEM_MC_END();
4928 break;
4929 }
4930
4931 case IEMMODE_32BIT:
4932 {
4933 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935
4936 IEM_MC_BEGIN(0, 1);
4937 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
4938 IEM_MC_STORE_GREG_U32(iReg, u32Value);
4939 IEM_MC_ADVANCE_RIP();
4940 IEM_MC_END();
4941 break;
4942 }
4943 case IEMMODE_64BIT:
4944 {
4945 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
4946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4947
4948 IEM_MC_BEGIN(0, 1);
4949 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
4950 IEM_MC_STORE_GREG_U64(iReg, u64Value);
4951 IEM_MC_ADVANCE_RIP();
4952 IEM_MC_END();
4953 break;
4954 }
4955 }
4956
4957 return VINF_SUCCESS;
4958}
4959
4960
4961/** Opcode 0xb8. */
4962FNIEMOP_DEF(iemOp_eAX_Iv)
4963{
4964 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
4965 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
4966}
4967
4968
4969/** Opcode 0xb9. */
4970FNIEMOP_DEF(iemOp_eCX_Iv)
4971{
4972 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
4973 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
4974}
4975
4976
4977/** Opcode 0xba. */
4978FNIEMOP_DEF(iemOp_eDX_Iv)
4979{
4980 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
4981 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
4982}
4983
4984
4985/** Opcode 0xbb. */
4986FNIEMOP_DEF(iemOp_eBX_Iv)
4987{
4988 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
4989 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
4990}
4991
4992
4993/** Opcode 0xbc. */
4994FNIEMOP_DEF(iemOp_eSP_Iv)
4995{
4996 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
4997 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
4998}
4999
5000
5001/** Opcode 0xbd. */
5002FNIEMOP_DEF(iemOp_eBP_Iv)
5003{
5004 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5005 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5006}
5007
5008
5009/** Opcode 0xbe. */
5010FNIEMOP_DEF(iemOp_eSI_Iv)
5011{
5012 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5013 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5014}
5015
5016
5017/** Opcode 0xbf. */
5018FNIEMOP_DEF(iemOp_eDI_Iv)
5019{
5020 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5021 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5022}
5023
5024
/**
 * Opcode 0xc0 - group 2 shift/rotate Eb by imm8 (C0 /r ib, 186+).
 *
 * The ModR/M reg field selects the operation; /6 is an invalid encoding.
 * OF and AF are architecturally undefined for these, hence the mask below.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one imm8 byte still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5084
5085
/**
 * Opcode 0xc1 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.
 *
 * The ModR/M reg field selects the operation; /6 is undefined and raises
 * \#UD.  The immediate-count shift group requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Shifts and rotates leave OF and AF in (partially) undefined states. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the effective address is calculated with a 1-byte
           immediate hint, since the Ib shift count follows the
           ModR/M/SIB/displacement bytes and is fetched afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5225
5226
/**
 * Opcode 0xc2 - retn Iw: near return, releasing Iw additional stack bytes.
 *
 * Defers to iemCImpl_retn with the effective operand size and the immediate;
 * the stack adjustment itself happens in the C implementation.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5236
5237
/**
 * Opcode 0xc3 - retn: plain near return (no extra stack bytes to pop,
 * hence the constant 0 passed to iemCImpl_retn).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5246
5247
5248/** Opcode 0xc4. */
5249FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
5250{
5251 /* The LES instruction is invalid 64-bit mode. In legacy and
5252 compatability mode it is invalid with MOD=3.
5253 The use as a VEX prefix is made possible by assigning the inverted
5254 REX.R to the top MOD bit, and the top bit in the inverted register
5255 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5256 to accessing registers 0..7 in this VEX form. */
5257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5258 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5259 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5260 {
5261 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
5262 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5263 {
5264 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5265 if ( ( pVCpu->iem.s.fPrefixes
5266 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5267 == 0)
5268 {
5269 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5270 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5271 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5272 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5273 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5274
5275 /** @todo VEX: Just use new tables and decoders. */
5276 IEMOP_BITCH_ABOUT_STUB();
5277 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5278 }
5279 Log(("VEX2: Invalid prefix mix!\n"));
5280 }
5281 else
5282 Log(("VEX2: AVX support disabled!\n"));
5283
5284 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5285 return IEMOP_RAISE_INVALID_OPCODE();
5286 }
5287 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5288 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5289}
5290
5291
5292/** Opcode 0xc5. */
5293FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
5294{
5295 /* The LDS instruction is invalid 64-bit mode. In legacy and
5296 compatability mode it is invalid with MOD=3.
5297 The use as a VEX prefix is made possible by assigning the inverted
5298 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5299 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5301 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5302 {
5303 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5304 {
5305 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5306 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5307 }
5308 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5309 }
5310
5311 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
5312 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5313 {
5314 /** @todo Test when exctly the VEX conformance checks kick in during
5315 * instruction decoding and fetching (using \#PF). */
5316 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5317 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5318 if ( ( pVCpu->iem.s.fPrefixes
5319 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5320 == 0)
5321 {
5322 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5323 if (bVex2 & 0x80 /* VEX.W */)
5324 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5325 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5326 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
5327 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
5328 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
5329 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
5330 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
5331
5332 /** @todo VEX: Just use new tables and decoders. */
5333 switch (bRm & 0xf)
5334 {
5335 case 1: /* 0x0f lead opcode byte. */
5336 IEMOP_BITCH_ABOUT_STUB();
5337 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5338
5339 case 2: /* 0x0f 0x38 lead opcode bytes. */
5340 IEMOP_BITCH_ABOUT_STUB();
5341 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5342
5343 case 3: /* 0x0f 0x3a lead opcode bytes. */
5344 IEMOP_BITCH_ABOUT_STUB();
5345 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5346
5347 default:
5348 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0xf));
5349 return IEMOP_RAISE_INVALID_OPCODE();
5350 }
5351 }
5352 else
5353 Log(("VEX3: Invalid prefix mix!\n"));
5354 }
5355 else
5356 Log(("VEX3: AVX support disabled!\n"));
5357 return IEMOP_RAISE_INVALID_OPCODE();
5358}
5359
5360
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib (reg field /0 only; all other /r
 * encodings raise \#UD).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the EA is calculated with a 1-byte immediate hint
           since the Ib immediate follows the ModR/M/SIB/displacement bytes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5393
5394
/**
 * Opcode 0xc7 - Group 11: mov Ev,Iz (reg field /0 only; all other /r
 * encodings raise \#UD).  The 64-bit form sign-extends a 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended Id, no 64-bit imm */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - the EA is calculated with an immediate-size hint
           (2 or 4 bytes) matching the Iz immediate that follows the
           ModR/M/SIB/displacement bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended Id, no 64-bit imm */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5480
5481
5482
5483
/**
 * Opcode 0xc8 - enter Iw,Ib: make stack frame of Iw bytes with nesting
 * level Ib.  Requires a 186 or later; the frame building is done in
 * iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
5495
5496
/**
 * Opcode 0xc9 - leave: release the stack frame set up by enter.
 * Requires a 186 or later; the actual work is done in iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
5506
5507
/**
 * Opcode 0xca - retf Iw: far return, releasing Iw additional stack bytes.
 * The privilege/segment handling lives in iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5517
5518
/**
 * Opcode 0xcb - retf: plain far return (no extra stack bytes to pop,
 * hence the constant 0 passed to iemCImpl_retf).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
5527
5528
/**
 * Opcode 0xcc - int 3: breakpoint trap, vector \#BP, flagged as the
 * dedicated one-byte breakpoint instruction (fIsBpInstr=true).
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
5535
5536
/**
 * Opcode 0xcd - int Ib: software interrupt with an arbitrary vector;
 * not treated as a breakpoint instruction (fIsBpInstr=false) even for
 * the two-byte "int 3" encoding.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
5544
5545
/**
 * Opcode 0xce - into: overflow trap, invalid in 64-bit mode.  Dispatches
 * to iemCImpl_int with vector \#OF; the conditional raise on EFLAGS.OF is
 * presumably handled inside the C implementation (not visible here).
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5559
5560
/**
 * Opcode 0xcf - iret: interrupt return; all mode/privilege handling is
 * deferred to iemCImpl_iret with the effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
5568
5569
/**
 * Opcode 0xd0 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,1.
 *
 * The ModR/M reg field selects the operation; /6 is undefined and raises
 * \#UD.  The shift count is the implicit constant 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Shifts and rotates leave OF and AF in (partially) undefined states. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5625
5626
5627
/**
 * Opcode 0xd1 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,1.
 *
 * The ModR/M reg field selects the operation; /6 is undefined and raises
 * \#UD.  The shift count is the implicit constant 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Shifts and rotates leave OF and AF in (partially) undefined states. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5759
5760
/**
 * Opcode 0xd2 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,CL.
 *
 * The ModR/M reg field selects the operation; /6 is undefined and raises
 * \#UD.  The shift count is taken from the CL register.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Shifts and rotates leave OF and AF in (partially) undefined states. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5818
5819
/**
 * Opcode 0xd3 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,CL.
 *
 * The ModR/M reg field selects the operation; /6 is undefined and raises
 * \#UD.  The shift count is taken from the CL register.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Shifts and rotates leave OF and AF in (partially) undefined states. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5957
/**
 * Opcode 0xd4 - aam Ib: ASCII adjust AX after multiply with an arbitrary
 * base.  Invalid in 64-bit mode; an immediate of 0 raises \#DE before
 * deferring to iemCImpl_aam.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* division by zero -> #DE */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
5969
5970
/**
 * Opcode 0xd5 - aad Ib: ASCII adjust AX before division with an arbitrary
 * base.  Invalid in 64-bit mode; the adjustment is done in iemCImpl_aad.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
5980
5981
/**
 * Opcode 0xd6 - salc: set AL from carry; AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6001
6002
/**
 * Opcode 0xd7 - xlat: AL = [seg:xBX + zero-extended AL], with xBX sized by
 * the effective address mode.  Only AL (the low byte of xAX) is updated.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6049
6050
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must hold values; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6081
6082
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only the FSW is updated; no register is written.  UINT8_MAX = no dest reg. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6113
6114
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped afterwards. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6145
6146
/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    /* ST0 = ST0 + STn; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6153
6154
/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    /* ST0 = ST0 * STn; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6161
6162
/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    /* Compare ST0 with STn; only FSW is affected, nothing is stored. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6169
6170
/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    /* Compare ST0 with STn, then pop the stack; same assembly worker as fcom. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6177
6178
/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    /* ST0 = ST0 - STn; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6185
6186
/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    /* Reversed subtract: ST0 = STn - ST0; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6193
6194
/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    /* ST0 = ST0 / STn; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6201
6202
/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    /* Reversed divide: ST0 = STn / ST0; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6209
6210
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before decoding completes. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit real source operand from memory. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6246
6247
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    /* ST0 = ST0 + [m32real]; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6254
6255
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    /* ST0 = ST0 * [m32real]; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6262
6263
/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    /* Compare ST0 with a 32-bit real memory operand; only FSW is updated. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant also records FPU DS/DP in the FPU state. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6296
6297
/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    /* Same as fcom st0,m32r but pops the register stack afterwards. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6330
6331
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* ST0 = ST0 - [m32real]; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
6338
6339
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    /* Reversed subtract: ST0 = [m32real] - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
6346
6347
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    /* ST0 = ST0 / [m32real]; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
6354
6355
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    /* Reversed divide: ST0 = [m32real] / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6362
6363
/** Opcode 0xd8. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (11 bits: low 3 of the escape byte + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    /* Register form (mod == 3) operates on STn; memory form on m32real. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6401
6402
/** Opcode 0xd9 /0 mem32real
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    /* Load a 32-bit real from memory, convert to r80, and push onto the stack. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6435
6436
/** Opcode 0xd9 !11/2 mem32real */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    /* Store ST0 to memory as a 32-bit real; ST0 is left on the stack. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6471
6472
/** Opcode 0xd9 !11/3 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    /* Like fst m32r, but the register stack is popped after the store. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6507
6508
/** Opcode 0xd9 !11/4 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    /* Load the FPU environment (14 or 28 bytes depending on operand size). */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* All the heavy lifting is done by the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6526
6527
6528/** Opcode 0xd9 !11/5 */
6529FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
6530{
6531 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
6532 IEM_MC_BEGIN(1, 1);
6533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6534 IEM_MC_ARG(uint16_t, u16Fsw, 0);
6535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6537 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6538 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6539 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6540 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
6541 IEM_MC_END();
6542 return VINF_SUCCESS;
6543}
6544
6545
/** Opcode 0xd9 !11/6 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    /* Store the FPU environment (14 or 28 bytes depending on operand size). */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* All the heavy lifting is done by the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6563
6564
/** Opcode 0xd9 !11/7 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    /* Store the FPU control word to a 2-byte memory operand. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6582
6583
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* FNOP does no arithmetic but still checks for pending FPU exceptions
       and updates the FPU opcode/instruction pointer. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6601
6602
/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Push a copy of STn (selected by the R/M field) onto the stack. */
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6630
6631
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Exchange ST0 and STn; C1 is set via the FSW in the result. */
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6662
6663
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop without copying. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST0 into STn, then pop the stack. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6710
6711
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must not be empty; otherwise flag stack underflow on it. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6741
6742
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    /* Change sign of ST0 (unary, result replaces ST0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
6749
6750
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    /* Absolute value of ST0 (unary, result replaces ST0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
6757
6758
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only the FSW is updated; no register write (UINT8_MAX = no dest reg). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6787
6788
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    /* Compare ST0 against 0.0; only condition flags in FSW are affected. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
6795
6796
/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    /* Classify ST0; only condition flags in FSW are affected. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
6803
6804
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6832
6833
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    /* Push +1.0 onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
6840
6841
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    /* Push log2(10) onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
6848
6849
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    /* Push log2(e) onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
6856
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    /* Push pi onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
6863
6864
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    /* Push log10(2) onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
6871
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    /* Push ln(2) onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
6878
6879
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    /* Push +0.0 onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
6886
6887
/** Opcode 0xd9 0xf0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    /* ST0 = 2^ST0 - 1 (unary, result replaces ST0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
6894
6895
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note: STn is the first operand here, ST0 the second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6927
6928
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    /* ST1 = ST1 * log2(ST0), then pop; passing 1 selects ST1 as STn. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
6935
6936
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must hold a value; both results are handled by the TWO variants. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6966
6967
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    /* Partial tangent: replaces ST0 and pushes a second result. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
6974
6975
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* Partial arctangent; result stored in ST1, then pop (STn = 1). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
6982
6983
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Split ST0 into exponent and significand; replaces ST0 and pushes. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
6990
6991
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* IEEE partial remainder of ST0 / ST1; result stored in ST0 (STn = 1). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
6998
6999
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    /* Rotate the FPU TOP pointer down by one; register contents unchanged. */
    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7022
7023
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    /* Rotate the FPU TOP pointer up by one; register contents unchanged. */
    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7046
7047
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* ST(0) := partial remainder of ST(0) by ST(1); result stays in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* ST(1) is updated from ST(1) and ST(0), then the stack is popped (stN=1 helper). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Unary operation on ST(0) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Replaces ST(0) with one result and pushes the other. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Unary operation on ST(0) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* ST(0) := ST(0) scaled by ST(1); result stays in ST(0), no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Unary operation on ST(0) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Unary operation on ST(0) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7110
7111
/** Dispatch table used by iemOp_EscF1 for the register form (mod=3) of
 *  opcode 0xd9 with reg fields 4..7, i.e. second opcode bytes 0xe0..0xff.
 *  The index is (second opcode byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7148
7149
/** Opcode 0xd9.
 *  Escape byte one of the x87 opcode space; dispatches on the modrm byte. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: modrm in the low byte, the low three
       bits of the escape byte (0xd9) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: dispatch on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 with mod=3 implies bRm is in 0xe0..0xff. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: dispatch on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7192
7193
/** Opcode 0xda 11/0.
 *  FCMOVB: copy ST(N) into ST(0) when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/1.
 *  FCMOVE: copy ST(N) into ST(0) when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/2.
 *  FCMOVBE: copy ST(N) into ST(0) when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/3.
 *  FCMOVU: copy ST(N) into ST(0) when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7304
7305
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * The implementation only produces an FSW value (PFNIEMAIMPLFPUR80FSW); no
 * register is written.  On success the FSW is committed and the stack popped
 * twice; if either operand register is empty, the stack-underflow path pops
 * twice as well.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operands are ST(0) and ST(1). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7337
7338
/** Opcode 0xda 0xe9.
 *  Unordered compare of ST(0) with ST(1), popping both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7345
7346
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The memory operand is fetched before the FPU state is prepared, so a memory
 * fault takes precedence.  If ST(0) is empty the stack-underflow path is
 * taken instead of calling the implementation.
 *
 * @param   bRm         The modrm byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7382
7383
/** Opcode 0xda !11/0.
 *  ST(0) := ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.
 *  ST(0) := ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
7398
7399
/** Opcode 0xda !11/2.
 *  Compare ST(0) with m32i; only FSW is updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory operand address is recorded in the FPU data pointer state. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow without a destination stack register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7432
7433
/** Opcode 0xda !11/3.
 *  Like FICOM m32i (same iemAImpl_ficom_r80_by_i32 worker), but pops ST(0)
 *  afterwards on both the success and the underflow paths. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow without a destination stack register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7466
7467
/** Opcode 0xda !11/4.
 *  ST(0) := ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5.
 *  ST(0) := m32i - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6.
 *  ST(0) := ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7.
 *  ST(0) := m32i / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
7498
7499
/** Opcode 0xda.
 *  Escape byte two of the x87 opcode space; dispatches on the modrm byte. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: modrm in the low byte, the low three
       bits of the escape byte (0xda) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is valid within reg=5. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: integer (m32i) arithmetic and compares. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7539
7540
/** Opcode 0xdb !11/0.
 *  Push m32i onto the FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7), i.e. the register TOP will rotate onto. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7572
7573
/** Opcode 0xdb !11/1.
 *  Store ST(0) to m32i with truncation and pop (SSE3's FISTTP). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7608
7609
/** Opcode 0xdb !11/2.
 *  Store ST(0) to m32i (rounded per FCW); the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7644
7645
/** Opcode 0xdb !11/3.
 *  Store ST(0) to m32i (rounded per FCW, same worker as FIST) and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7680
7681
/** Opcode 0xdb !11/5.
 *  Push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7), i.e. the register TOP will rotate onto. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7713
7714
/** Opcode 0xdb !11/7.
 *  Store ST(0) to an 80-bit real in memory and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if #IA is masked, store the QNaN real-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7749
7750
/** Opcode 0xdb 11/0.
 *  FCMOVNB: copy ST(N) into ST(0) when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/1.
 *  FCMOVNE: copy ST(N) into ST(0) when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/2.
 *  FCMOVNBE: copy ST(N) into ST(0) when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, otherwise raise underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7833
7834
7835/** Opcode 0xdb 11/3. */
7836FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
7837{
7838 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
7839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7840
7841 IEM_MC_BEGIN(0, 1);
7842 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
7843
7844 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7845 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7846
7847 IEM_MC_PREPARE_FPU_USAGE();
7848 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
7849 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
7850 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
7851 IEM_MC_ENDIF();
7852 IEM_MC_UPDATE_FPU_OPCODE_IP();
7853 IEM_MC_ELSE()
7854 IEM_MC_FPU_STACK_UNDERFLOW(0);
7855 IEM_MC_ENDIF();
7856 IEM_MC_ADVANCE_RIP();
7857
7858 IEM_MC_END();
7859 return VINF_SUCCESS;
7860}
7861
7862
/** Opcode 0xdb 0xe0.
 *  8087 interrupt-enable; ignored here: only the #NM check, then advance RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1.
 *  8087 interrupt-disable; ignored here: only the #NM check, then advance RIP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7887
7888
/** Opcode 0xdb 0xe2.
 *  Clears the FSW exception bits.  Being the no-wait form there is no
 *  pending-exception check (no IEM_MC_MAYBE_RAISE_FPU_XCPT), only #NM. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7903
7904
/** Opcode 0xdb 0xe3.
 *  FPU init, no-wait form: deferred to the C implementation with exception
 *  checking disabled. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4.
 *  80287 set-protected-mode; ignored here: only the #NM check, then advance RIP. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7925
7926
/** Opcode 0xdb 0xe5.
 *  80287XL return-to-real-mode.  The ignore-as-nop variant is compiled out;
 *  the active code raises \#UD, matching newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
7942
7943
/** Opcode 0xdb 11/5.
 *  Unordered compare of ST(0) with ST(N), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6.
 *  Ordered compare of ST(0) with ST(N), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
7958
7959
/** Opcode 0xdb.
 *  Escape byte three of the x87 opcode space; dispatches on the modrm byte. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: modrm in the low byte, the low three
       bits of the escape byte (0xdb) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 hosts the miscellaneous control opcodes 0xe0..0xe7. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every inner case returns. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8009
8010
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * STn is selected by the r/m field of the modrm byte; it is both the first
 * operand and the destination, and also the register blamed on stack
 * underflow.
 *
 * @param   bRm         The modrm byte (register form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8042
8043
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0 - adds ST0 to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8050
8051
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0 - multiplies ST(i) by ST0, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8058
8059
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0 - ST(i) = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8066
8067
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0 - ST(i) = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8074
8075
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 - ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8082
8083
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - ST(i) = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8090
8091
8092/**
8093 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8094 * memory operand, and storing the result in ST0.
8095 *
8096 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8097 */
8098FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8099{
8100 IEM_MC_BEGIN(3, 3);
8101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8102 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8103 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8104 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8105 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8106 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8107
8108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8110 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8111 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8112
8113 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8114 IEM_MC_PREPARE_FPU_USAGE();
8115 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8116 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8117 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8118 IEM_MC_ELSE()
8119 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8120 IEM_MC_ENDIF();
8121 IEM_MC_ADVANCE_RIP();
8122
8123 IEM_MC_END();
8124 return VINF_SUCCESS;
8125}
8126
8127
/** Opcode 0xdc !11/0.
 * FADD m64real - ST0 += the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8134
8135
/** Opcode 0xdc !11/1.
 * FMUL m64real - ST0 *= the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8142
8143
/** Opcode 0xdc !11/2.
 * FCOM m64real - compares ST0 with the 64-bit real memory operand, setting
 * C0/C2/C3 in FSW; no register is modified. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,       r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no register to mark; only FSW and the FPU data pointer change. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8176
8177
/** Opcode 0xdc !11/3.
 * FCOMP m64real - like FCOM m64real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,       r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no register to mark; pop still happens on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8210
8211
/** Opcode 0xdc !11/4.
 * FSUB m64real - ST0 -= the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8218
8219
/** Opcode 0xdc !11/5.
 * FSUBR m64real - ST0 = m64real - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8226
8227
/** Opcode 0xdc !11/6.
 * FDIV m64real - ST0 /= the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8234
8235
/** Opcode 0xdc !11/7.
 * FDIVR m64real - ST0 = m64real / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8242
8243
/** Opcode 0xdc.
 * Escape opcode 4: register forms (mod=3) operate on ST(i)/ST0; memory forms
 * take a 64-bit real operand.  Dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8280
8281
/** Opcode 0xdd !11/0.
 * FLD m64real - converts the 64-bit real memory operand to 80-bit and pushes
 * it onto the register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8313
8314
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST0 truncated towards zero as a 64-bit integer and
 * pops the register stack.  On a masked invalid operation the integer
 * indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8349
8350
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST0 to the 64-bit real memory operand without popping.
 * On stack underflow with \#IA masked, the negative QNaN indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8385
8386
8387
8388
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: write negative QNaN indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8423
8424
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred to a
 * C implementation (operand size selects the 16/32-bit image layout). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8442
8443
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory (no pending-exception
 * check); deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
8462
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to memory without checking for
 * pending unmasked exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM.  Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
8487
8488
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks ST(i) as empty in the tag word; no other state changes. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8510
8511
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST0's value in a result with a zero FSW delta, then store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8536
8537
/** Opcode 0xdd 11/4.
 * FUCOM ST0,ST(i) - unordered compare; sets C0/C2/C3, stores nothing. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
8544
8545
/** Opcode 0xdd 11/5.
 * FUCOMP ST0,ST(i) - unordered compare and pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
8552
8553
/** Opcode 0xdd.
 * Escape opcode 5: register forms handle FFREE/FST/FSTP/FUCOM(P); memory forms
 * take 64-bit real/int operands plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8590
8591
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - ST(i) += ST0, then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
8598
8599
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST0 - ST(i) *= ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
8606
8607
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST0 with ST1, then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
8614
8615
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - ST(i) = ST0 - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
8622
8623
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - ST(i) -= ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
8630
8631
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - ST(i) = ST0 / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
8638
8639
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - ST(i) /= ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
8646
8647
8648/**
8649 * Common worker for FPU instructions working on ST0 and an m16i, and storing
8650 * the result in ST0.
8651 *
8652 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8653 */
8654FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
8655{
8656 IEM_MC_BEGIN(3, 3);
8657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8658 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8659 IEM_MC_LOCAL(int16_t, i16Val2);
8660 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8661 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8662 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
8663
8664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8666
8667 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8668 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8669 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8670
8671 IEM_MC_PREPARE_FPU_USAGE();
8672 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8673 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
8674 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8675 IEM_MC_ELSE()
8676 IEM_MC_FPU_STACK_UNDERFLOW(0);
8677 IEM_MC_ENDIF();
8678 IEM_MC_ADVANCE_RIP();
8679
8680 IEM_MC_END();
8681 return VINF_SUCCESS;
8682}
8683
8684
/** Opcode 0xde !11/0.
 * FIADD m16int - ST0 += the 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
8691
8692
/** Opcode 0xde !11/1.
 * FIMUL m16int - ST0 *= the 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
8699
8700
/** Opcode 0xde !11/2.
 * FICOM m16int - compares ST0 with the 16-bit signed integer memory operand,
 * setting C0/C2/C3 in FSW; no register is modified. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,     i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no register to mark on stack underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8733
8734
/** Opcode 0xde !11/3.
 * FICOMP m16int - like FICOM m16int but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,     i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no register to mark; pop still happens on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8767
8768
/** Opcode 0xde !11/4.
 * FISUB m16int - ST0 -= the 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
8775
8776
/** Opcode 0xde !11/5.
 * FISUBR m16int - ST0 = m16int - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
8783
8784
/** Opcode 0xde !11/6.
 * FIDIV m16int - ST0 /= the 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
8791
8792
/** Opcode 0xde !11/7.
 * FIDIVR m16int - ST0 = m16int / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
8799
8800
/** Opcode 0xde.
 * Escape opcode 6: register forms are the pop variants of the arithmetic
 * instructions (plus FCOMPP at 0xd9); memory forms take a 16-bit integer. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8839
8840
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE followed by FINCSTP
 * (frees ST(i), then increments the stack top pointer). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8862
8863
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copies the FPU status word into AX without checking for pending
 * unmasked exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8880
8881
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare setting EFLAGS, then pop.
 * NOTE(review): this uses iemAImpl_fcomi_r80_by_r80, same as FCOMIP; FUCOMI
 * differs from FCOMI only in \#IA behavior on QNaN operands - confirm whether
 * sharing the FCOMI worker here is intentional. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8888
8889
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting EFLAGS, then pop; deferred to the
 * shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8896
8897
/** Opcode 0xdf !11/0.
 * FILD m16int - converts the 16-bit signed integer memory operand to 80-bit
 * real and pushes it onto the register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8929
8930
/** Opcode 0xdf !11/1.
 * FISTTP m16int - stores ST0 truncated towards zero as a 16-bit integer and
 * pops the register stack.  On a masked invalid operation the integer
 * indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8965
8966
/** Opcode 0xdf !11/2. FIST m16int - store ST(0) to a 16-bit integer using the
 *  rounding mode from FCW.RC; the register stack is NOT popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the memory operand; LOCK is invalid on FPU stores. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert+store, commit memory and FSW (no pop). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty (stack underflow): with #IA masked, store the integer
           indefinite value; either way record the underflow (no pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9001
9002
/** Opcode 0xdf !11/3. FISTP m16int - store ST(0) to a 16-bit integer using the
 *  rounding mode from FCW.RC and pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the memory operand; LOCK is invalid on FPU stores. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert+store, commit memory and FSW, then pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty (stack underflow): with #IA masked, store the integer
           indefinite value; either way record the underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9037
9038
/** Opcode 0xdf !11/4. FBLD m80 - load packed BCD (per the Intel SDM).
 *  Not implemented yet; FNIEMOP_STUB_1 provides the placeholder body. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9041
9042
/** Opcode 0xdf !11/5. FILD m64int - convert a signed 64-bit integer from
 *  memory to 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    /* Decode the memory operand; LOCK is invalid on FPU loads. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        /* ST(7) free, i.e. room to push: convert and push the result. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Stack overflow: record it instead of pushing. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9074
9075
/** Opcode 0xdf !11/6. FBSTP m80 - store packed BCD and pop (per the Intel
 *  SDM). Not implemented yet; FNIEMOP_STUB_1 provides the placeholder body. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9078
9079
/** Opcode 0xdf !11/7. FISTP m64int - store ST(0) to a 64-bit integer using the
 *  rounding mode from FCW.RC and pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the memory operand; LOCK is invalid on FPU stores. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert+store, commit memory and FSW, then pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty (stack underflow): with #IA masked, store the integer
           indefinite value; either way record the underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9114
9115
/** Opcode 0xdf. FPU escape group 7: dispatches on the ModR/M byte to the
 *  register forms (mod=3) or the memory forms (mod!=3), selected by the
 *  reg field. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod=3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only 0xdf 0xe0 is FNSTSW AX; other rm values are #UD. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (mod!=3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9153
9154
/** Opcode 0xe0. LOOPNE/LOOPNZ Jb - decrement the count register and take the
 *  short branch if it is non-zero AND ZF is clear. The effective address size
 *  selects CX, ECX or RCX as the count register; EFLAGS are not modified. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9201
9202
/** Opcode 0xe1. LOOPE/LOOPZ Jb - decrement the count register and take the
 *  short branch if it is non-zero AND ZF is set. The effective address size
 *  selects CX, ECX or RCX as the count register; EFLAGS are not modified. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9249
9250
/** Opcode 0xe2. LOOP Jb - decrement the count register and take the short
 *  branch while it is non-zero. The effective address size selects CX, ECX or
 *  RCX; EFLAGS are not modified.
 *
 *  A 'loop $' that branches to its own first byte (i8Imm equal to minus the
 *  instruction length) would otherwise be re-executed once per count, so each
 *  mode short-circuits that case by zeroing the count register and advancing
 *  RIP - architecturally equivalent since LOOP touches no flags. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - drain the count in one go (see function docs). */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - drain the count in one go (see function docs). */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - drain the count in one go (see function docs). */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9324
9325
/** Opcode 0xe3. JCXZ/JECXZ/JRCXZ Jb - take the short branch when the count
 *  register is zero. The effective address size selects CX, ECX or RCX
 *  (hence the three mnemonics); EFLAGS are not examined or modified. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();       /* CX != 0: fall through. */
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);   /* CX == 0: branch. */
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9369
9370
/** Opcode 0xe4. IN AL,Ib - read one byte from the immediate port into AL.
 *  Deferred to the C implementation (port I/O, permission checks etc.). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
9379
9380
/** Opcode 0xe5. IN eAX,Ib - read a word or dword (per the effective operand
 *  size) from the immediate port into AX/EAX. Deferred to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9389
9390
/** Opcode 0xe6. OUT Ib,AL - write AL to the immediate port. Deferred to the
 *  C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
9399
9400
/** Opcode 0xe7. OUT Ib,eAX - write AX/EAX (per the effective operand size)
 *  to the immediate port. Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9409
9410
/** Opcode 0xe8. CALL Jv - near relative call. The immediate width follows the
 *  effective operand size (16/32 bits; in 64-bit mode a 32-bit immediate is
 *  sign-extended to 64 bits). Deferred to the C implementations which push
 *  the return address and adjust RIP. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast reinterprets the raw immediate as a signed displacement. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9439
9440
/** Opcode 0xe9. JMP Jv - near relative jump with a 16- or 32-bit
 *  displacement; 64-bit mode shares the 32-bit immediate form (the
 *  displacement is sign-extended by the relative-jump microcode). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:   /* 64-bit mode still fetches a 32-bit immediate. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9470
9471
/** Opcode 0xea. JMP Ap - direct far jump (ptr16:16 / ptr16:32). Invalid in
 *  64-bit mode (IEMOP_HLP_NO_64BIT). The selector:offset pair is decoded
 *  here and the mode/privilege handling is deferred to iemCImpl_FarJmp. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);       /* 32-bit offset form. */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended. */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
9488
9489
/** Opcode 0xeb. JMP Jb - short relative jump with a sign-extended 8-bit
 *  displacement; valid in all modes. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9503
9504
/** Opcode 0xec. IN AL,DX - read one byte from the port in DX into AL.
 *  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
9512
9513
/** Opcode 0xed. IN eAX,DX - read a word or dword (per the effective operand
 *  size) from the port in DX into AX/EAX. Deferred to the C implementation.
 *  NOTE(review): the function name lacks the 'in_' prefix its siblings use
 *  (iemOp_in_AL_DX); renaming would touch the dispatch table elsewhere. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9521
9522
/** Opcode 0xee. OUT DX,AL - write AL to the port in DX. Deferred to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
9530
9531
/** Opcode 0xef. OUT DX,eAX - write AX/EAX (per the effective operand size)
 *  to the port in DX. Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9539
9540
/** Opcode 0xf0. LOCK prefix - records the prefix and continues decoding with
 *  the next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Restart decoding at the byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9550
9551
/** Opcode 0xf1. INT1/ICEBP - raises a \#DB as a software interrupt
 *  (fIsBpInstr=false, so it is not treated like the INT3 breakpoint). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
9560
9561
/** Opcode 0xf2. REPNE/REPNZ prefix - records the prefix (clearing any earlier
 *  REPE) and continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Restart decoding at the byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9577
9578
/** Opcode 0xf3. REPE/REPZ prefix - records the prefix (clearing any earlier
 *  REPNE) and continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Restart decoding at the byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9594
9595
/** Opcode 0xf4. HLT - halt the CPU; privilege checks and the actual halting
 *  are deferred to the C implementation.
 *  NOTE(review): unlike its neighbours this handler has no IEMOP_MNEMONIC
 *  invocation - presumably intentional (stats/logging only); confirm before
 *  adding one. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
9602
9603
/** Opcode 0xf5. CMC - complement the carry flag; no other flags touched. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9615
9616
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Byte-sized unary read-modify-write on a register or memory operand; the
 * memory form honours the LOCK prefix by dispatching to the locked worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is in effect. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9660
9661
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Word/dword/qword unary read-modify-write. Register forms go through the
 * shared GReg worker; the memory forms below select the 16/32/64-bit worker
 * by effective operand size and honour the LOCK prefix.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size normal + locked
 *                  workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* Pick the locked worker when a LOCK prefix is in effect. */
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9740
9741
/** Opcode 0xf6 /0. TEST Eb,Ib - AND the byte operand with the immediate to
 *  set flags only; the destination is never written (memory is mapped
 *  read-only). AF is left undefined per the architecture. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' tells the effective-address calc that one more
           immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes its destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9788
9789
/** Opcode 0xf7 /0. TEST Ev,Iv - AND the word/dword/qword operand with the
 *  immediate to set flags only; the destination is never written (memory is
 *  mapped read-only, the 64-bit immediate is the sign-extended 32-bit form).
 *  AF is left undefined per the architecture. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = number of immediate bytes following the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read-only mapping: TEST never writes its destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes: Iv is imm32 sign-extended in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9929
9930
/** Opcode 0xf6 /4, /5, /6 and /7. Common worker for the byte-sized
 *  MUL/IMUL/DIV/IDIV forms: the operand comes from a register or memory, AX
 *  is the implicit accumulator, and a non-zero return from the assembly
 *  worker signals a divide error (\#DE). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9982
9983
9984/** Opcode 0xf7 /4, /5, /6 and /7. */
9985FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
9986{
9987 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9988
9989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9990 {
9991 /* register access */
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993 switch (pVCpu->iem.s.enmEffOpSize)
9994 {
9995 case IEMMODE_16BIT:
9996 {
9997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9998 IEM_MC_BEGIN(4, 1);
9999 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10000 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10001 IEM_MC_ARG(uint16_t, u16Value, 2);
10002 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10003 IEM_MC_LOCAL(int32_t, rc);
10004
10005 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10006 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10007 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10008 IEM_MC_REF_EFLAGS(pEFlags);
10009 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10010 IEM_MC_IF_LOCAL_IS_Z(rc) {
10011 IEM_MC_ADVANCE_RIP();
10012 } IEM_MC_ELSE() {
10013 IEM_MC_RAISE_DIVIDE_ERROR();
10014 } IEM_MC_ENDIF();
10015
10016 IEM_MC_END();
10017 return VINF_SUCCESS;
10018 }
10019
10020 case IEMMODE_32BIT:
10021 {
10022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10023 IEM_MC_BEGIN(4, 1);
10024 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10025 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10026 IEM_MC_ARG(uint32_t, u32Value, 2);
10027 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10028 IEM_MC_LOCAL(int32_t, rc);
10029
10030 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10031 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10032 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10033 IEM_MC_REF_EFLAGS(pEFlags);
10034 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10035 IEM_MC_IF_LOCAL_IS_Z(rc) {
10036 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10037 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10038 IEM_MC_ADVANCE_RIP();
10039 } IEM_MC_ELSE() {
10040 IEM_MC_RAISE_DIVIDE_ERROR();
10041 } IEM_MC_ENDIF();
10042
10043 IEM_MC_END();
10044 return VINF_SUCCESS;
10045 }
10046
10047 case IEMMODE_64BIT:
10048 {
10049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10050 IEM_MC_BEGIN(4, 1);
10051 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10052 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10053 IEM_MC_ARG(uint64_t, u64Value, 2);
10054 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10055 IEM_MC_LOCAL(int32_t, rc);
10056
10057 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10058 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10059 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10060 IEM_MC_REF_EFLAGS(pEFlags);
10061 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10062 IEM_MC_IF_LOCAL_IS_Z(rc) {
10063 IEM_MC_ADVANCE_RIP();
10064 } IEM_MC_ELSE() {
10065 IEM_MC_RAISE_DIVIDE_ERROR();
10066 } IEM_MC_ENDIF();
10067
10068 IEM_MC_END();
10069 return VINF_SUCCESS;
10070 }
10071
10072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10073 }
10074 }
10075 else
10076 {
10077 /* memory access. */
10078 switch (pVCpu->iem.s.enmEffOpSize)
10079 {
10080 case IEMMODE_16BIT:
10081 {
10082 IEM_MC_BEGIN(4, 2);
10083 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10084 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10085 IEM_MC_ARG(uint16_t, u16Value, 2);
10086 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10088 IEM_MC_LOCAL(int32_t, rc);
10089
10090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10092 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10093 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10094 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10095 IEM_MC_REF_EFLAGS(pEFlags);
10096 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10097 IEM_MC_IF_LOCAL_IS_Z(rc) {
10098 IEM_MC_ADVANCE_RIP();
10099 } IEM_MC_ELSE() {
10100 IEM_MC_RAISE_DIVIDE_ERROR();
10101 } IEM_MC_ENDIF();
10102
10103 IEM_MC_END();
10104 return VINF_SUCCESS;
10105 }
10106
10107 case IEMMODE_32BIT:
10108 {
10109 IEM_MC_BEGIN(4, 2);
10110 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10111 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10112 IEM_MC_ARG(uint32_t, u32Value, 2);
10113 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10115 IEM_MC_LOCAL(int32_t, rc);
10116
10117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10119 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10120 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10121 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10122 IEM_MC_REF_EFLAGS(pEFlags);
10123 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10124 IEM_MC_IF_LOCAL_IS_Z(rc) {
10125 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10126 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10127 IEM_MC_ADVANCE_RIP();
10128 } IEM_MC_ELSE() {
10129 IEM_MC_RAISE_DIVIDE_ERROR();
10130 } IEM_MC_ENDIF();
10131
10132 IEM_MC_END();
10133 return VINF_SUCCESS;
10134 }
10135
10136 case IEMMODE_64BIT:
10137 {
10138 IEM_MC_BEGIN(4, 2);
10139 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10140 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10141 IEM_MC_ARG(uint64_t, u64Value, 2);
10142 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10144 IEM_MC_LOCAL(int32_t, rc);
10145
10146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10148 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10149 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10150 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10151 IEM_MC_REF_EFLAGS(pEFlags);
10152 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10153 IEM_MC_IF_LOCAL_IS_Z(rc) {
10154 IEM_MC_ADVANCE_RIP();
10155 } IEM_MC_ELSE() {
10156 IEM_MC_RAISE_DIVIDE_ERROR();
10157 } IEM_MC_ENDIF();
10158
10159 IEM_MC_END();
10160 return VINF_SUCCESS;
10161 }
10162
10163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10164 }
10165 }
10166}
10167
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operand: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10204
10205
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operand: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10242
10243
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag. */
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10255
10256
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag. */
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10268
10269
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege checks and side effects are handled by the C impl. */
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
10277
10278
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege checks and interrupt shadow are handled by the C impl. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10285
10286
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag. */
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10298
10299
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag. */
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10311
10312
/** Opcode 0xfe. */
FNIEMOP_DEF(iemOp_Grp4)
{
    /* Group 4: only /0 (inc Eb) and /1 (dec Eb) are defined; /2../7 are #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
10330
10331
/**
 * Opcode 0xff /2 - near indirect call.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10416
10417typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
10418
10419FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
10420{
10421 /* Registers? How?? */
10422 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
10423 { /* likely */ }
10424 else
10425 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
10426
10427 /* Far pointer loaded from memory. */
10428 switch (pVCpu->iem.s.enmEffOpSize)
10429 {
10430 case IEMMODE_16BIT:
10431 IEM_MC_BEGIN(3, 1);
10432 IEM_MC_ARG(uint16_t, u16Sel, 0);
10433 IEM_MC_ARG(uint16_t, offSeg, 1);
10434 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10438 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10439 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
10440 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10441 IEM_MC_END();
10442 return VINF_SUCCESS;
10443
10444 case IEMMODE_64BIT:
10445 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
10446 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
10447 * and call far qword [rsp] encodings. */
10448 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
10449 {
10450 IEM_MC_BEGIN(3, 1);
10451 IEM_MC_ARG(uint16_t, u16Sel, 0);
10452 IEM_MC_ARG(uint64_t, offSeg, 1);
10453 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10457 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10458 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
10459 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10460 IEM_MC_END();
10461 return VINF_SUCCESS;
10462 }
10463 /* AMD falls thru. */
10464 /* fall thru */
10465
10466 case IEMMODE_32BIT:
10467 IEM_MC_BEGIN(3, 1);
10468 IEM_MC_ARG(uint16_t, u16Sel, 0);
10469 IEM_MC_ARG(uint32_t, offSeg, 1);
10470 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
10471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10474 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10475 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
10476 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10477 IEM_MC_END();
10478 return VINF_SUCCESS;
10479
10480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10481 }
10482}
10483
10484
/**
 * Opcode 0xff /3 - far indirect call through a memory far pointer.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Shares the far-pointer loading with jmpf (0xff /5). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
10494
10495
/**
 * Opcode 0xff /4 - near indirect jump.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10580
10581
/**
 * Opcode 0xff /5 - far indirect jump through a memory far pointer.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Shares the far-pointer loading with callf (0xff /3). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
10591
10592
/**
 * Opcode 0xff /6 - push word/dword/qword register or memory operand.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10648
10649
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: dispatch on the ModR/M reg field; /7 is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit reg values are covered above; getting here is an IPE. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
10678
10679
10680
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte.
 * Prefix bytes, group opcodes and the 0x0f escape dispatch further on
 * subsequent bytes inside the respective handlers.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
10748
10749
10750/** @} */
10751
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette