VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65768

Last change on this file since 65768 was 65768, checked in by vboxsync, 8 years ago

IEM: naming fixes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 365.5 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65768 2017-02-13 15:02:54Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25
26/** @name One byte opcodes.
27 *
28 * @{
29 */
30
/*
 * Opcodes 0x00-0x05: ADD.  All six encodings funnel into the shared
 * binary-operator decode helpers, parameterised with the ADD worker table.
 */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
77
78
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    IEMOP_HLP_NO_64BIT();                       /* invalid encoding in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment-register loads have side effects; defer to a C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
95
96
/*
 * Opcodes 0x08-0x0d: OR.  Same decode helpers as ADD with the OR worker
 * table; AF is declared undefined for the verifier (logical ops leave AF
 * undefined per the architecture).
 */

/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
149
150
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* Note: no matching POP CS - 0x0f is the two-byte escape (see below). */
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
157
158
/** Opcode 0x0f.
 *  Two-byte opcode escape: fetches the second opcode byte and dispatches
 *  into g_apfnTwoByteMap, which holds four entries per opcode (one per
 *  mandatory-prefix column, selected via idxPrefix). */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time strict-build sanity check that the table really is laid out
       with four columns per opcode; 0xbc is used because its columns differ
       (bsf for none/0x66/0xf2, tzcnt for 0xf3). */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
181
/*
 * Opcodes 0x10-0x15: ADC (add with carry) - shared decode helpers with the
 * ADC worker table.
 */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
228
229
/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();               /* invalid encoding in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
246
247
/*
 * Opcodes 0x18-0x1d: SBB (subtract with borrow) - shared decode helpers
 * with the SBB worker table.
 */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
294
295
/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();               /* invalid encoding in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
312
313
/*
 * Opcodes 0x20-0x25: AND.  Shared decode helpers with the AND worker table;
 * AF is declared undefined for the verifier (logical ops leave AF undefined).
 */

/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
366
367
/** Opcode 0x26.
 *  ES segment-override prefix: records the prefix, sets the effective
 *  segment, then fetches and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Any REX prefix decoded before this one is void (REX must come last). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();               /* invalid encoding in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);  /* OF undefined after DAA */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
389
390
/*
 * Opcodes 0x28-0x2d: SUB - shared decode helpers with the SUB worker table.
 */

/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
437
438
/** Opcode 0x2e.
 *  CS segment-override prefix (also the branch-not-taken hint byte in
 *  other contexts): record prefix, set effective segment, dispatch next. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();               /* invalid encoding in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);  /* OF undefined after DAS */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
460
461
/*
 * Opcodes 0x30-0x35: XOR.  Shared decode helpers with the XOR worker table;
 * AF is declared undefined for the verifier (logical ops leave AF undefined).
 */

/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
514
515
/** Opcode 0x36.
 *  SS segment-override prefix: record prefix, set effective segment,
 *  fetch and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37. AAA - not implemented yet (stub raises/asserts). */
FNIEMOP_STUB(iemOp_aaa);
530
531
/*
 * Opcodes 0x38-0x3d: CMP - shared decode helpers with the CMP worker table
 * (subtract that only updates EFLAGS).
 */

/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
578
579
/** Opcode 0x3e.
 *  DS segment-override prefix: record prefix, set effective segment,
 *  fetch and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f. AAS - not implemented yet (stub raises/asserts). */
FNIEMOP_STUB(iemOp_aas);
594
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits a micro-op block that takes a pointer to the destination general
 * register and EFLAGS, invoking the width-appropriate worker from @a pImpl.
 *
 * @param   pImpl   Worker function table (inc/dec/not/neg).
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS;    /* defensive; all IEMMODE values handled above */
}
639
640
/*
 * Opcodes 0x40-0x47: in 64-bit mode these are the REX prefixes (REX..REX.RXB
 * without W); each handler records the prefix bits, updates the uRexReg/
 * uRexB/uRexIndex shifters and restarts decoding at the next byte.  In all
 * other modes they decode as INC reg16/reg32.
 */

/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;    /* REX.B extends r/m, base and opreg */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X extends the SIB index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;  /* REX.R extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
812
813
/*
 * Opcodes 0x48-0x4f: in 64-bit mode these are the REX.W prefix combinations;
 * unlike 0x40-0x47 they set REX.W and must recalculate the effective operand
 * size (iemRecalEffOpSize) before restarting decode.  In all other modes
 * they decode as DEC reg16/reg32.
 */

/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);   /* REX.W forces 64-bit operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
992
993
/**
 * Common 'push register' helper.
 *
 * Handles the 0x50-0x57 family.  In 64-bit mode REX.B extends the register
 * index, the default operand size is forced to 64-bit, and 0x66 selects
 * 16-bit (there is no 32-bit push in long mode).
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1039
1040
/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1071
1072
/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086 quirk: PUSH SP stores the already-decremented SP, hence the
           explicit SP-2 here instead of the common push path below. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* NOTE(review): no explicit return after IEM_MC_END() here - this
           appears to rely on the IEM_MC_* block macros returning; confirm,
           otherwise the 8086 path would fall through and push twice. */
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1089
1090
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1113
1114
/**
 * Common 'pop register' helper.
 *
 * Handles the 0x58-0x5f family.  In 64-bit mode REX.B extends the register
 * index, the default operand size is forced to 64-bit, and 0x66 selects
 * 16-bit (there is no 32-bit pop in long mode).
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1161
1162
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1193
1194
/** Opcode 0x5c.
 *  POP rSP needs special treatment because the destination is the stack
 *  pointer itself: the popped value becomes the new SP, so the common
 *  reference-then-pop helper cannot be used (except for r12, where REX.B
 *  redirects to a normal register). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* really pop r12 */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1242
1243
/** Opcode 0x5d.
 * POP rBP/eBP/BP - defers to the common pop-general-register worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1250
1251
/** Opcode 0x5e.
 * POP rSI/eSI/SI - defers to the common pop-general-register worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1258
1259
/** Opcode 0x5f.
 * POP rDI/eDI/DI - defers to the common pop-general-register worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1266
1267
/** Opcode 0x60.
 * PUSHA/PUSHAD - pushes all general registers; implemented in C
 * (iemCImpl_pusha_16/32) since it touches eight stack slots. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();   /* Instruction first appeared on the 80186. */
    IEMOP_HLP_NO_64BIT();  /* Not valid in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1279
1280
/** Opcode 0x61.
 * POPA/POPAD outside 64-bit mode; in 64-bit mode the byte is the (unsupported)
 * MVEX prefix, which is rejected with \#UD. */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();   /* Instruction first appeared on the 80186. */
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    /* 64-bit mode: MVEX prefix encoding space - not implemented, raise #UD. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1298
1299
/** Opcode 0x62.
 * BOUND Gv,Ma (pre-64-bit) / EVEX prefix space - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186();
1303
1304
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjusts the RPL field of the destination selector; always a
 * 16-bit read-modify-write operation.  Invalid in real and V86 modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference it directly and let the assembly
           worker update it in place together with EFLAGS.ZF. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, run the worker on the
           mapping, then commit both the memory and the EFLAGS result. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1354
1355
/** Opcode 0x63.
 * MOVSXD Gv,Ev (64-bit mode) - sign-extends a 32-bit source into a 64-bit
 * general register.
 * @note This is a weird one. It works like a regular move instruction if
 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the low 32 bits of the source register sign-extended to 64 bits. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* 32-bit memory read, sign-extended into the 64-bit destination. */
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1397
1398
/** Opcode 0x64.
 * FS segment-override prefix: records the prefix, sets the effective segment
 * and recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();   /* FS/GS were introduced with the 80386. */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1411
1412
/** Opcode 0x65.
 * GS segment-override prefix: records the prefix, sets the effective segment
 * and recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();   /* FS/GS were introduced with the 80386. */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1425
1426
/** Opcode 0x66.
 * Operand-size override prefix: flips the effective operand size and
 * recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1444
1445
/** Opcode 0x67.
 * Address-size override prefix: toggles the effective address mode relative
 * to the default (16<->32, 64->32) and recursively decodes the next byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* no 16-bit addressing in long mode */
        default: AssertFailed();
    }

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1464
1465
/** Opcode 0x68.
 * PUSH Iz - pushes a 16/32-bit immediate; in 64-bit mode the 32-bit
 * immediate is sign-extended to 64 bits before being pushed. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is still 32 bits on the wire; sign-extend to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1510
1511
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * The destination register is always Gv; only CF/OF are defined afterwards
 * (SF/ZF/AF/PF are architecturally undefined). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand: multiply into a local copy, then write Gv. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the trailing '2' tells the effective address
                   calculation that 2 immediate bytes still follow. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes follow the ModRM encoding. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is 32-bit, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes follow, sign-extended to 64. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
1671
1672
/** Opcode 0x6a.
 * PUSH Ib - pushes a sign-extended 8-bit immediate using the current
 * effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* i8Imm is signed, so the implicit widening in each push below
           performs the architectural sign extension. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1699
1700
/** Opcode 0x6b.
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended 8-bit
 * immediate.  SF/ZF/AF/PF are architecturally undefined afterwards. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand: sign-extend the byte immediate to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModRM encoding. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand: sign-extend the byte immediate to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModRM encoding. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand: sign-extend the byte immediate to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModRM encoding. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
1854
1855
/** Opcode 0x6c.
 * INS Yb,DX (byte) - implemented entirely in C, dispatching on the REP
 * prefix and the effective address mode.  Both REPZ and REPNZ are treated
 * as plain REP here.
 * The 'false' literal presumably flags that I/O permissions haven't been
 * pre-checked - confirm against the iemCImpl_ins_* worker signature. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1884
1885
/** Opcode 0x6d.
 * INS Yv,DX (word/dword) - dispatches on REP prefix, operand size and
 * address mode.  A 64-bit operand size is handled like 32-bit (the op32
 * workers are used), as there are no 64-bit port I/O operations. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all cases above return */
            case IEMMODE_64BIT: /* fall through: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT: /* fall through: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1946
1947
/** Opcode 0x6e.
 * OUTS DX,Yb (byte) - implemented in C, dispatching on the REP prefix and
 * the effective address mode.  Unlike INS, the source segment is passed
 * along since OUTS honours segment overrides on the source operand. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1976
1977
/** Opcode 0x6f.
 * OUTS DX,Yv (word/dword) - dispatches on REP prefix, operand size and
 * address mode.  A 64-bit operand size is handled like 32-bit (the op32
 * workers are used); the effective source segment is passed along. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all cases above return */
            case IEMMODE_64BIT: /* fall through: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT: /* fall through: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2038
2039
/** Opcode 0x70.
 * JO Jb - short jump taken when OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2057
2058
/** Opcode 0x71.
 * JNO Jb - short jump taken when OF=0 (note the inverted branch: the
 * taken path is in the ELSE arm). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2076
/** Opcode 0x72.
 * JC/JB/JNAE Jb - short jump taken when CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2094
2095
/** Opcode 0x73.
 * JNC/JNB/JAE Jb - short jump taken when CF=0 (inverted branch sense). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2113
2114
/** Opcode 0x74.
 * JE/JZ Jb - short jump taken when ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2132
2133
/** Opcode 0x75.
 * JNE/JNZ Jb - short jump taken when ZF=0 (inverted branch sense). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2151
2152
/** Opcode 0x76.
 * JBE/JNA Jb - short jump taken when CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2170
2171
/** Opcode 0x77.
 * JA/JNBE Jb - short jump taken when CF=0 and ZF=0 (inverted branch sense). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2189
2190
/** Opcode 0x78.
 * JS Jb - short jump taken when SF=1. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2208
2209
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8 - jump if SF=0 (tested inverted). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();                       /* SF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* SF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2227
2228
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8 - jump if PF=1. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* PF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2246
2247
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8 - jump if PF=0 (tested inverted). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();                       /* PF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* PF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2265
2266
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8 - jump if SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2284
2285
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JGE/JNL rel8 - jump if SF == OF (tested inverted). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();                       /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2303
2304
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8 - jump if ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* ZF=1 or SF!=OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2322
2323
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8 - jump if ZF=0 and SF == OF (tested inverted). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();                       /* ZF=1 or SF!=OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);                   /* ZF=0 and SF==OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2341
2342
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1, byte operand with byte immediate: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
     * The /reg field of the ModR/M byte selects the operation; the worker
     * function table g_apIemImplGrp1 is indexed by the same field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or  Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK on a register operand is invalid */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;   /* read-modify-write ops have a locked variant */
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;    /* CMP only reads the destination */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = one immediate byte still to come */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();                  /* LOCK is acceptable for RMW ops */
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK + CMP -> #UD */

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2412
2413
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1, word/dword/qword operand with full-size immediate:
     * add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.  The /reg field of the ModR/M
     * byte selects the operation.  In 64-bit operand size the immediate is
     * a dword sign-extended to 64 bits (Iz, not Iq).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz,  "or  Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;   /* read-modify-write ops */
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;    /* read-only destination */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);   /* 2 immediate bytes still to come */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();                  /* LOCK allowed for RMW ops */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper register half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 immediate bytes still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);   /* dword immediate sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 immediate bytes still to come */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);   /* dword sign-extended to 64 bits */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);  /* note: assigned after decode-done here, unlike the 16/32-bit cases */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
2601
2602
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of 0x80 (Group 1 Eb,Ib); it is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
2609
2610
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1, word/dword/qword operand with a sign-extended byte immediate:
     * add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.  The /reg field of the ModR/M
     * byte selects the operation; the byte immediate is sign-extended via
     * the (int8_t) casts below to the effective operand size.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib,  "or  Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK on a register operand is invalid */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 16 bits */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 32 bits */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper register half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 64 bits */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)    /* locked variants exist for all sizes or none, so checking U16 suffices */
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);           /* sign-extend Ib to 16 bits */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();                  /* LOCK allowed for RMW ops */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);           /* sign-extend Ib to 32 bits */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);           /* sign-extend Ib to 64 bits */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
2793
2794
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - byte AND without storing the result; AF is undefined afterwards. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
2802
2803
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - word/dword/qword AND without storing the result; AF is undefined afterwards. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
2811
2812
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb - exchange a byte register with another register or memory. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK with a register destination -> #UD */

        /* Swap via two temporaries: fetch both, then store crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write and
         * swapped with the register by the assembly worker.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2860
2861
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* XCHG Ev,Gv - exchange a word/dword/qword register with another register or memory. */
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK with a register destination -> #UD */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Swap via two temporaries: fetch both, store crosswise. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and swapped with the register by the assembly worker.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit register writes zero the upper half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2983
2984
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV Eb,Gb - store a byte register to a register or memory destination. */
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK + MOV -> #UD */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3024
3025
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV Ev,Gv - store a word/dword/qword register to a register or memory destination. */
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK + MOV -> #UD */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3115
3116
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV Gb,Eb - load a byte register from a register or memory source. */
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK + MOV -> #UD */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3154
3155
/**
 * @opcode 0x8b - 'mov Gv,Ev'.
 *
 * Loads the 16/32/64-bit register selected by modrm.reg (extended by REX.R)
 * from the register or memory operand selected by modrm.rm, using the
 * current effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3245
3246
3247/** Opcode 0x63. */
3248FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3249{
3250 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3251 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3252 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3253 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3254 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3255}
3256
3257
/**
 * @opcode 0x8c - 'mov Ev,Sw'.
 *
 * Stores a segment register (selected by modrm.reg) into a general register
 * or memory.  Register destinations honour the operand size (zero extending
 * the upper bits); memory destinations are always written as a word.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3331
3332
3333
3334
/**
 * @opcode 0x8d - 'lea Gv,M'.
 *
 * Stores the effective address of the memory operand into the register
 * selected by modrm.reg, truncating it to the effective operand size.
 * A register form (mod == 3) is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3381
3382
/**
 * @opcode 0x8e - 'mov Sw,Ev'.
 *
 * Loads a segment register (selected by modrm.reg) from a general register
 * or memory word.  CS cannot be the destination; the full segment load
 * (descriptor fetch, checks) is deferred to iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3437
3438
/**
 * @opcode 0x8f /0 - 'pop Ev'.
 *
 * Pops a word/dword/qword off the stack into a general register or memory.
 * The memory form is special: Intel documents that RSP is incremented
 * *before* it is used in the effective address calculation, which clashes
 * with the normal decode-then-commit flow, hence the interpreter-only
 * implementation below.
 *
 * @param   bRm     The ModR/M byte, already fetched by the 0x8f dispatcher.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The extra cbImm argument biases rSP as if the pop had already happened. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the new RSP once both the pop and the store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3533
3534
3535/** Opcode 0x8f. */
3536FNIEMOP_DEF(iemOp_Grp1A__xop)
3537{
3538 /*
3539 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
3540 * three byte VEX prefix, except that the mmmmm field cannot have the values
3541 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
3542 */
3543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3544 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
3545 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
3546
3547 IEMOP_MNEMONIC(xop, "xop");
3548 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
3549 {
3550 /** @todo Test when exctly the XOP conformance checks kick in during
3551 * instruction decoding and fetching (using \#PF). */
3552 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
3553 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
3554 if ( ( pVCpu->iem.s.fPrefixes
3555 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
3556 == 0)
3557 {
3558 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
3559 if (bXop2 & 0x80 /* XOP.W */)
3560 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
3561 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
3562 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
3563 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
3564 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
3565 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
3566 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
3567
3568 /** @todo XOP: Just use new tables and decoders. */
3569 switch (bRm & 0x1f)
3570 {
3571 case 8: /* xop opcode map 8. */
3572 IEMOP_BITCH_ABOUT_STUB();
3573 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3574
3575 case 9: /* xop opcode map 9. */
3576 IEMOP_BITCH_ABOUT_STUB();
3577 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3578
3579 case 10: /* xop opcode map 10. */
3580 IEMOP_BITCH_ABOUT_STUB();
3581 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3582
3583 default:
3584 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
3585 return IEMOP_RAISE_INVALID_OPCODE();
3586 }
3587 }
3588 else
3589 Log(("XOP: Invalid prefix mix!\n"));
3590 }
3591 else
3592 Log(("XOP: XOP support disabled!\n"));
3593 return IEMOP_RAISE_INVALID_OPCODE();
3594}
3595
3596
3597/**
3598 * Common 'xchg reg,rAX' helper.
3599 */
3600FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
3601{
3602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3603
3604 iReg |= pVCpu->iem.s.uRexB;
3605 switch (pVCpu->iem.s.enmEffOpSize)
3606 {
3607 case IEMMODE_16BIT:
3608 IEM_MC_BEGIN(0, 2);
3609 IEM_MC_LOCAL(uint16_t, u16Tmp1);
3610 IEM_MC_LOCAL(uint16_t, u16Tmp2);
3611 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
3612 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
3613 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
3614 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
3615 IEM_MC_ADVANCE_RIP();
3616 IEM_MC_END();
3617 return VINF_SUCCESS;
3618
3619 case IEMMODE_32BIT:
3620 IEM_MC_BEGIN(0, 2);
3621 IEM_MC_LOCAL(uint32_t, u32Tmp1);
3622 IEM_MC_LOCAL(uint32_t, u32Tmp2);
3623 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
3624 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
3625 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
3626 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
3627 IEM_MC_ADVANCE_RIP();
3628 IEM_MC_END();
3629 return VINF_SUCCESS;
3630
3631 case IEMMODE_64BIT:
3632 IEM_MC_BEGIN(0, 2);
3633 IEM_MC_LOCAL(uint64_t, u64Tmp1);
3634 IEM_MC_LOCAL(uint64_t, u64Tmp2);
3635 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
3636 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
3637 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
3638 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
3639 IEM_MC_ADVANCE_RIP();
3640 IEM_MC_END();
3641 return VINF_SUCCESS;
3642
3643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3644 }
3645}
3646
3647
/**
 * @opcode 0x90 - NOP, PAUSE (F3 prefix), or 'xchg r8,rAX' (REX.B).
 *
 * With REX.B the encoding is no longer a no-op but exchanges r8 with rAX.
 * A LOCK (here: repz handled elsewhere) prefix turns it into PAUSE, which
 * this implementation treats as a plain NOP.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC(pause, "pause");
    else
        IEMOP_MNEMONIC(nop, "nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3667
3668
/** Opcode 0x91 - 'xchg rCX,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
3675
3676
/** Opcode 0x92 - 'xchg rDX,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
3683
3684
/** Opcode 0x93 - 'xchg rBX,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
3691
3692
3693/** Opcode 0x94. */
3694FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
3695{
3696 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
3697 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
3698}
3699
3700
/** Opcode 0x95 - 'xchg rBP,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
3707
3708
/** Opcode 0x96 - 'xchg rSI,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
3715
3716
/** Opcode 0x97 - 'xchg rDI,rAX' (register index extended by REX.B in the helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
3723
3724
/**
 * @opcode 0x98 - CBW / CWDE / CDQE.
 *
 * Sign-extends AL into AX, AX into EAX, or EAX into RAX, depending on the
 * effective operand size.  Implemented by testing the source's sign bit and
 * OR-ing / AND-ing the register accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {     /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3770
3771
/**
 * @opcode 0x99 - CWD / CDQ / CQO.
 *
 * Sign-extends rAX into rDX: DX, EDX or RDX is filled with all ones or all
 * zeros depending on the sign bit of AX/EAX/RAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {    /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3817
3818
/**
 * @opcode 0x9a - 'call Ap' (far call with immediate seg:off).
 *
 * Invalid in 64-bit mode.  Fetches the offset (16 or 32 bits, zero
 * extended) and selector from the instruction stream and defers the actual
 * far call (gate/descriptor handling) to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
3835
3836
/**
 * @opcode 0x9b - WAIT (aka FWAIT).
 *
 * Checks for pending FPU exceptions (raising \#NM/\#MF as appropriate) and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3850
3851
3852/** Opcode 0x9c. */
3853FNIEMOP_DEF(iemOp_pushf_Fv)
3854{
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3857 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
3858}
3859
3860
3861/** Opcode 0x9d. */
3862FNIEMOP_DEF(iemOp_popf_Fv)
3863{
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3865 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3866 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
3867}
3868
3869
/**
 * @opcode 0x9e - SAHF.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to one).  In 64-bit mode this is only valid when the CPU reports the
 * LahfSahf feature.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));   /* keep only the bits SAHF can't touch */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);             /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3892
3893
/**
 * @opcode 0x9f - LAHF.
 *
 * Loads AH from the low byte of EFLAGS.  In 64-bit mode this is only valid
 * when the CPU reports the LahfSahf feature.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3910
3911
3912/**
3913 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
3914 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
3915 * prefixes. Will return on failures.
3916 * @param a_GCPtrMemOff The variable to store the offset in.
3917 */
3918#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
3919 do \
3920 { \
3921 switch (pVCpu->iem.s.enmEffAddrMode) \
3922 { \
3923 case IEMMODE_16BIT: \
3924 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
3925 break; \
3926 case IEMMODE_32BIT: \
3927 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
3928 break; \
3929 case IEMMODE_64BIT: \
3930 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
3931 break; \
3932 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3933 } \
3934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3935 } while (0)
3936
3937/** Opcode 0xa0. */
3938FNIEMOP_DEF(iemOp_mov_Al_Ob)
3939{
3940 /*
3941 * Get the offset and fend of lock prefixes.
3942 */
3943 RTGCPTR GCPtrMemOff;
3944 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3945
3946 /*
3947 * Fetch AL.
3948 */
3949 IEM_MC_BEGIN(0,1);
3950 IEM_MC_LOCAL(uint8_t, u8Tmp);
3951 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
3952 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
3953 IEM_MC_ADVANCE_RIP();
3954 IEM_MC_END();
3955 return VINF_SUCCESS;
3956}
3957
3958
/**
 * @opcode 0xa1 - 'mov rAX,Ov'.
 *
 * Loads AX/EAX/RAX from the word/dword/qword at the immediate memory offset.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4004
4005
4006/** Opcode 0xa2. */
4007FNIEMOP_DEF(iemOp_mov_Ob_AL)
4008{
4009 /*
4010 * Get the offset and fend of lock prefixes.
4011 */
4012 RTGCPTR GCPtrMemOff;
4013 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4014
4015 /*
4016 * Store AL.
4017 */
4018 IEM_MC_BEGIN(0,1);
4019 IEM_MC_LOCAL(uint8_t, u8Tmp);
4020 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4021 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4022 IEM_MC_ADVANCE_RIP();
4023 IEM_MC_END();
4024 return VINF_SUCCESS;
4025}
4026
4027
4028/** Opcode 0xa3. */
4029FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4030{
4031 /*
4032 * Get the offset and fend of lock prefixes.
4033 */
4034 RTGCPTR GCPtrMemOff;
4035 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4036
4037 /*
4038 * Store rAX.
4039 */
4040 switch (pVCpu->iem.s.enmEffOpSize)
4041 {
4042 case IEMMODE_16BIT:
4043 IEM_MC_BEGIN(0,1);
4044 IEM_MC_LOCAL(uint16_t, u16Tmp);
4045 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4046 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4047 IEM_MC_ADVANCE_RIP();
4048 IEM_MC_END();
4049 return VINF_SUCCESS;
4050
4051 case IEMMODE_32BIT:
4052 IEM_MC_BEGIN(0,1);
4053 IEM_MC_LOCAL(uint32_t, u32Tmp);
4054 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4055 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4056 IEM_MC_ADVANCE_RIP();
4057 IEM_MC_END();
4058 return VINF_SUCCESS;
4059
4060 case IEMMODE_64BIT:
4061 IEM_MC_BEGIN(0,1);
4062 IEM_MC_LOCAL(uint64_t, u64Tmp);
4063 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4064 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4065 IEM_MC_ADVANCE_RIP();
4066 IEM_MC_END();
4067 return VINF_SUCCESS;
4068
4069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4070 }
4071}
4072
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-rep MOVS step: load from DS(or override):rSI, store to
 * ES:rDI, then step both index registers up or down by the element size
 * according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4091
/**
 * @opcode 0xa4 - MOVSB.
 *
 * Byte string move.  With a REP/REPNE prefix the whole operation is deferred
 * to the C implementation matching the effective address size; otherwise a
 * single step is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4125
4126
4127/** Opcode 0xa5. */
4128FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4129{
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131
4132 /*
4133 * Use the C implementation if a repeat prefix is encountered.
4134 */
4135 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4136 {
4137 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4138 switch (pVCpu->iem.s.enmEffOpSize)
4139 {
4140 case IEMMODE_16BIT:
4141 switch (pVCpu->iem.s.enmEffAddrMode)
4142 {
4143 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4144 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4145 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4147 }
4148 break;
4149 case IEMMODE_32BIT:
4150 switch (pVCpu->iem.s.enmEffAddrMode)
4151 {
4152 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4153 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4154 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4156 }
4157 case IEMMODE_64BIT:
4158 switch (pVCpu->iem.s.enmEffAddrMode)
4159 {
4160 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4161 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4162 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4164 }
4165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4166 }
4167 }
4168 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4169
4170 /*
4171 * Annoying double switch here.
4172 * Using ugly macro for implementing the cases, sharing it with movsb.
4173 */
4174 switch (pVCpu->iem.s.enmEffOpSize)
4175 {
4176 case IEMMODE_16BIT:
4177 switch (pVCpu->iem.s.enmEffAddrMode)
4178 {
4179 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4180 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4181 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4183 }
4184 break;
4185
4186 case IEMMODE_32BIT:
4187 switch (pVCpu->iem.s.enmEffAddrMode)
4188 {
4189 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4190 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4191 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4193 }
4194 break;
4195
4196 case IEMMODE_64BIT:
4197 switch (pVCpu->iem.s.enmEffAddrMode)
4198 {
4199 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4200 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4201 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4203 }
4204 break;
4205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4206 }
4207 return VINF_SUCCESS;
4208}
4209
4210#undef IEM_MOVS_CASE
4211
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Implements one CMPS iteration for a given operand width (ValBits) and
 * effective address width (AddrBits): fetches the DS:xSI operand into a
 * local, fetches the ES:xDI operand, runs the 'cmp' assembly worker to
 * update EFLAGS, then advances or rewinds both xSI and xDI by the operand
 * byte count depending on EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    /* First operand: [iEffSeg:xSI] (seg overridable), second: [ES:xDI]. */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    /* DF decides whether the string runs up or down in memory. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/** Opcode 0xa6 - cmpsb Xb,Yb.
 *
 * Byte string compare.  With a REPE/REPNE prefix the whole (possibly
 * interruptible) loop is deferred to a C implementation; otherwise a single
 * iteration is emitted via IEM_CMPS_CASE. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4284
4285
/** Opcode 0xa7 - cmpsw/cmpsd/cmpsq Xv,Yv.
 *
 * Word/dword/qword string compare, dispatched on effective operand size and
 * effective address size.  REPE/REPNE variants defer to C implementations;
 * the plain variant emits one IEM_CMPS_CASE iteration. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4404
4405#undef IEM_CMPS_CASE
4406
/** Opcode 0xa8 - test AL,Ib.
 * AF is architecturally undefined after TEST, hence the verification hint. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
4414
4415
/** Opcode 0xa9 - test rAX,Iz.
 * AF is architecturally undefined after TEST, hence the verification hint. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
4423
4424
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Implements one STOS iteration: stores AL/AX/EAX/RAX to [ES:xDI], then
 * advances or rewinds xDI by the operand byte count depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

4440
/** Opcode 0xaa - stosb Yb,AL.
 *
 * REP variants (REPZ and REPNZ behave identically for STOS) are deferred to
 * C implementations; the plain variant emits one IEM_STOS_CASE iteration. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4474
4475
/** Opcode 0xab - stosw/stosd/stosq Yv,eAX.
 *
 * Dispatched on effective operand size and effective address size.  REP
 * variants defer to C implementations; otherwise one IEM_STOS_CASE
 * iteration is emitted. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4558
4559#undef IEM_STOS_CASE
4560
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Implements one LODS iteration: loads [iEffSeg:xSI] into AL/AX/EAX/RAX,
 * then advances or rewinds xSI by the operand byte count per EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4576
/** Opcode 0xac - lodsb AL,Xb.
 *
 * REP variants are deferred to C implementations; the plain variant emits
 * one IEM_LODS_CASE iteration. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4610
4611
/** Opcode 0xad - lodsw/lodsd/lodsq eAX,Xv.
 *
 * Dispatched on effective operand size and effective address size.  REP
 * variants defer to C implementations; otherwise one IEM_LODS_CASE
 * iteration is emitted. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4694
4695#undef IEM_LODS_CASE
4696
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Implements one SCAS iteration: compares AL/AX/EAX/RAX against [ES:xDI]
 * via the 'cmp' assembly worker (updating EFLAGS), then advances or
 * rewinds xDI by the operand byte count per EFLAGS.DF.  Note that SCAS
 * always uses ES:xDI; no segment override applies. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4718
4719/** Opcode 0xae. */
4720FNIEMOP_DEF(iemOp_scasb_AL_Xb)
4721{
4722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4723
4724 /*
4725 * Use the C implementation if a repeat prefix is encountered.
4726 */
4727 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4728 {
4729 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
4730 switch (pVCpu->iem.s.enmEffAddrMode)
4731 {
4732 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
4733 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
4734 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
4735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4736 }
4737 }
4738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4739 {
4740 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
4741 switch (pVCpu->iem.s.enmEffAddrMode)
4742 {
4743 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
4744 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
4745 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
4746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4747 }
4748 }
4749 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
4750
4751 /*
4752 * Sharing case implementation with stos[wdq] below.
4753 */
4754 switch (pVCpu->iem.s.enmEffAddrMode)
4755 {
4756 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
4757 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
4758 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
4759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4760 }
4761 return VINF_SUCCESS;
4762}
4763
4764
/** Opcode 0xaf - scasw/scasd/scasq eAX,Xv.
 *
 * Dispatched on effective operand size and effective address size.
 * REPE/REPNE variants defer to C implementations; otherwise one
 * IEM_SCAS_CASE iteration is emitted. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 16-bit addressing is not encodable in 64-bit mode, while 32-bit is - verify. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4880
4881#undef IEM_SCAS_CASE
4882
4883/**
4884 * Common 'mov r8, imm8' helper.
4885 */
4886FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
4887{
4888 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890
4891 IEM_MC_BEGIN(0, 1);
4892 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
4893 IEM_MC_STORE_GREG_U8(iReg, u8Value);
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896
4897 return VINF_SUCCESS;
4898}
4899
4900
/** Opcode 0xb0 - mov AL,Ib (or R8L with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
4907
4908
/** Opcode 0xb1 - mov CL,Ib (or R9L with REX.B). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
4915
4916
/** Opcode 0xb2 - mov DL,Ib (or R10L with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
4923
4924
/** Opcode 0xb3 - mov BL,Ib (or R11L with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
4931
4932
/** Opcode 0xb4 - mov AH,Ib.
 * Register index 4 selects AH without a REX prefix; with REX it refers to
 * SPL/R12L - presumably resolved inside IEM_MC_STORE_GREG_U8 (not visible
 * here), hence xSP|uRexB. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
4939
4940
/** Opcode 0xb5 - mov CH,Ib (BPL/R13L under REX; see note at opcode 0xb4). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
4947
4948
/** Opcode 0xb6 - mov DH,Ib (SIL/R14L under REX; see note at opcode 0xb4). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
4955
4956
/** Opcode 0xb7 - mov BH,Ib (DIL/R15L under REX; see note at opcode 0xb4). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
4963
4964
4965/**
4966 * Common 'mov regX,immX' helper.
4967 */
4968FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
4969{
4970 switch (pVCpu->iem.s.enmEffOpSize)
4971 {
4972 case IEMMODE_16BIT:
4973 {
4974 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976
4977 IEM_MC_BEGIN(0, 1);
4978 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
4979 IEM_MC_STORE_GREG_U16(iReg, u16Value);
4980 IEM_MC_ADVANCE_RIP();
4981 IEM_MC_END();
4982 break;
4983 }
4984
4985 case IEMMODE_32BIT:
4986 {
4987 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
4988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4989
4990 IEM_MC_BEGIN(0, 1);
4991 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
4992 IEM_MC_STORE_GREG_U32(iReg, u32Value);
4993 IEM_MC_ADVANCE_RIP();
4994 IEM_MC_END();
4995 break;
4996 }
4997 case IEMMODE_64BIT:
4998 {
4999 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5001
5002 IEM_MC_BEGIN(0, 1);
5003 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5004 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5005 IEM_MC_ADVANCE_RIP();
5006 IEM_MC_END();
5007 break;
5008 }
5009 }
5010
5011 return VINF_SUCCESS;
5012}
5013
5014
/** Opcode 0xb8 - mov rAX,Iv (or r8 with REX.B). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5021
5022
/** Opcode 0xb9 - mov rCX,Iv (or r9 with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5029
5030
/** Opcode 0xba - mov rDX,Iv (or r10 with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5037
5038
/** Opcode 0xbb - mov rBX,Iv (or r11 with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5045
5046
/** Opcode 0xbc - mov rSP,Iv (or r12 with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5053
5054
/** Opcode 0xbd - mov rBP,Iv (or r13 with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5061
5062
/** Opcode 0xbe - mov rSI,Iv (or r14 with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5069
5070
/** Opcode 0xbf - mov rDI,Iv (or r15 with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5077
5078
/** Opcode 0xc0 - Group 2 Eb,Ib (rol/ror/rcl/rcr/shl/shr/sar by imm8).
 *
 * The ModR/M reg field selects the shift/rotate operation (/6 is an invalid
 * encoding); the imm8 that follows the ModR/M bytes supplies the count.
 * Requires a 186 or later.  OF and AF are left architecturally undefined
 * by these operations, hence the verification hint. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the imm8 follows any SIB/displacement bytes,
           so it is fetched after calculating the effective address. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5138
5139
/** Opcode 0xc1 - Group 2: rotate/shift Ev by an immediate byte count
 *  (rol/ror/rcl/rcr/shl/shr/sar Ev,Ib).  The ModR/M reg field selects the
 *  operation; /6 is an invalid encoding. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* the Ib-count forms first appeared on the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot see the cases are exhaustive */
    }
    /* OF and AF are architecturally undefined for (some) shift/rotate counts;
       tell the verification mode not to compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: the immediate count follows the ModR/M byte. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: the operand is mapped read/write, the worker runs on
           the mapping, then memory and eflags are committed.  The '1' passed to
           IEM_MC_CALC_RM_EFF_ADDR is the size of the trailing immediate byte —
           presumably needed for RIP-relative addressing; same pattern as the
           other Ib-immediate opcodes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,          0);
                IEM_MC_ARG(uint8_t,         cShiftArg,        1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,      2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,          0);
                IEM_MC_ARG(uint8_t,         cShiftArg,        1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,      2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,          0);
                IEM_MC_ARG(uint8_t,         cShiftArg,        1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,      2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5279
5280
/** Opcode 0xc2 - retn Iw: near return, popping Iw extra bytes of arguments
 *  off the stack after the return address. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near returns default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5290
5291
/** Opcode 0xc3 - retn: plain near return (implemented as retn with a zero
 *  byte count, sharing iemCImpl_retn with 0xc2). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near returns default to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5300
5301
/** Opcode 0xc4 - les Gv,Mp in legacy/compat mode, or the two-byte VEX prefix
 *  when in 64-bit mode or when ModR/M has MOD=3. */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            /* VEX may not be combined with 66/F3/F2/LOCK/REX prefixes. */
            if (   (  pVCpu->iem.s.fPrefixes
                    & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
                == 0)
            {
                /* Unpack the second VEX byte: R (bit 7, inverted), vvvv
                   (bits 6:3, inverted), L (bit 2) and pp (bits 1:0). */
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
                /* NOTE(review): ~bRm promotes to int, so bits above the REX.R
                   position survive the shift; presumably uRexReg is a narrow
                   bitfield or consumers mask it — verify. */
                pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
                pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
                pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
                pVCpu->iem.s.idxPrefix  = bRm & 0x3;

                return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
            }

            Log(("VEX2: Invalid prefix mix!\n"));
        }
        else
            Log(("VEX2: AVX support disabled!\n"));

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
5343
5344
/** Opcode 0xc5 - lds Gv,Mp in legacy/compat mode, or the three-byte VEX
 *  prefix when in 64-bit mode or when ModR/M has MOD=3. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "vex3");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
    {
        /** @todo Test when exctly the VEX conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* VEX may not be combined with 66/F3/F2/LOCK/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            /* Byte 1 (bRm): inverted R/X/B in bits 7:5, map-select in bits 4:0.
               Byte 2 (bVex2): W in bit 7, inverted vvvv in 6:3, L in 2, pp in 1:0. */
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (bVex2 & 0x80 /* VEX.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* NOTE(review): the ~bRm shifts below keep the promoted high bits;
               presumably these fields are narrow bitfields or consumers mask
               them — verify. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Dispatch on the opcode-map selector (mmmmm). */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);

                case 2: /* 0x0f 0x38 lead opcode bytes. */
                    /** @todo VEX: Just use new tables and decoders. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 3: /* 0x0f 0x3a lead opcode bytes. */
                    /** @todo VEX: Just use new tables and decoders. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("VEX3: Invalid prefix mix!\n"));
    }
    else
        Log(("VEX3: AVX support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5412
5413
/** Opcode 0xc6 - Group 11: mov Eb,Ib (the only valid encoding is /0;
 *  everything else raises \#UD). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access.  The '1' passed to IEM_MC_CALC_RM_EFF_ADDR is the
           size of the trailing immediate byte. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5446
5447
/** Opcode 0xc7 - Group 11: mov Ev,Iz (the only valid encoding is /0;
 *  everything else raises \#UD).  In 64-bit mode the Iz immediate is a
 *  sign-extended 32-bit value. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The third IEM_MC_CALC_RM_EFF_ADDR argument is the
           number of immediate bytes still to be fetched (2 or 4). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5533
5534
5535
5536
/** Opcode 0xc8 - enter Iw,Ib: set up a stack frame of Iw bytes with nesting
 *  level Ib (modulo handling done in the cimpl worker). */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
5548
5549
/** Opcode 0xc9 - leave: tear down the stack frame created by ENTER
 *  (SP := BP, pop BP). */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
5559
5560
/** Opcode 0xca - retf Iw: far return, popping Iw extra bytes of arguments
 *  after CS:IP. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5570
5571
/** Opcode 0xcb - retf: plain far return (retf with a zero byte count,
 *  sharing iemCImpl_retf with 0xca). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
5580
5581
/** Opcode 0xcc - int3: breakpoint, raising \#BP.  fIsBpInstr=true lets the
 *  cimpl worker distinguish this dedicated one-byte encoding from a generic
 *  'int 3' (0xcd 0x03). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
5588
5589
/** Opcode 0xcd - int Ib: software interrupt to the vector given by the
 *  immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
5597
5598
/** Opcode 0xce - into: raise \#OF if the overflow flag is set (the cimpl
 *  worker checks OF).  Invalid (\#UD) in 64-bit mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5612
5613
/** Opcode 0xcf - iret: interrupt return; all the mode-dependent heavy
 *  lifting happens in iemCImpl_iret. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
5621
5622
/** Opcode 0xd0 - Group 2: rotate/shift Eb by an implicit count of 1
 *  (rol/ror/rcl/rcr/shl/shr/sar Eb,1).  /6 is an invalid encoding. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot see the cases are exhaustive */
    }
    /* OF and AF are architecturally undefined for (some) shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, run the worker, then commit data + eflags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5678
5679
5680
/** Opcode 0xd1 - Group 2: rotate/shift Ev by an implicit count of 1
 *  (rol/ror/rcl/rcr/shl/shr/sar Ev,1).  /6 is an invalid encoding. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot see the cases are exhaustive */
    }
    /* OF and AF are architecturally undefined for (some) shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, run the worker, then commit data + eflags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5812
5813
/** Opcode 0xd2 - Group 2: rotate/shift Eb by the count in CL
 *  (rol/ror/rcl/rcr/shl/shr/sar Eb,CL).  /6 is an invalid encoding. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot see the cases are exhaustive */
    }
    /* OF and AF are architecturally undefined for (some) shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: the count is fetched from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, run the worker, then commit data + eflags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5871
5872
/** Opcode 0xd3 - Group 2: rotate/shift Ev by the count in CL
 *  (rol/ror/rcl/rcr/shl/shr/sar Ev,CL).  /6 is an invalid encoding. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot see the cases are exhaustive */
    }
    /* OF and AF are architecturally undefined for (some) shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: the count is fetched from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, run the worker, then commit data + eflags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6010
/** Opcode 0xd4 - aam Ib: ASCII adjust AX after multiply, with an arbitrary
 *  base byte.  A zero base raises \#DE; invalid (\#UD) in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM divides AL by the immediate */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6022
6023
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* immediate is the base/multiplier (0x0a for plain AAD) */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode; no \#DE check needed as AAD multiplies. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6033
6034
6035/** Opcode 0xd6. */
6036FNIEMOP_DEF(iemOp_salc)
6037{
6038 IEMOP_MNEMONIC(salc, "salc");
6039 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6040 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6042 IEMOP_HLP_NO_64BIT();
6043
6044 IEM_MC_BEGIN(0, 0);
6045 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6046 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6047 } IEM_MC_ELSE() {
6048 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6049 } IEM_MC_ENDIF();
6050 IEM_MC_ADVANCE_RIP();
6051 IEM_MC_END();
6052 return VINF_SUCCESS;
6053}
6054
6055
/** Opcode 0xd7.
 * XLAT: AL = [seg:xBX + zero-extended AL]; one variant per effective address
 * size (the table base register width follows the address-size attribute). */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX (16-bit wrap) */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6102
6103
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,              1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,              2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only operate when both ST0 and STn are occupied; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6134
6135
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register result is stored.
 *
 * @param   bRm         The ModR/M byte; low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word changes */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6166
6167
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* update flags, then pop ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6198
6199
/** Opcode 0xd8 11/0.
 * FADD ST0,STn - thin wrapper dispatching to the generic st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.
 * FMUL ST0,STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.
 * FCOM ST0,STn - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.
 * FCOMP ST0,STn - same compare implementation as FCOM, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.
 * FSUB ST0,STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.
 * FSUBR ST0,STn - reversed operand order subtraction. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.
 * FDIV ST0,STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.
 * FDIVR ST0,STn - reversed operand order division. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6262
6263
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * in memory), and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte, used for effective address calculation.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand into a local before touching FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6299
6300
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32r - thin wrapper dispatching to the generic st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6315
6316
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32r - compare ST0 with a 32-bit real from memory; FSW only,
 * no register result.  Open-coded rather than using a worker because the
 * FSW update records the memory operand (segment + address) for FPU DP. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6349
6350
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32r - identical to iemOp_fcom_m32r except that the stack is
 * popped after the compare (the *_THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6383
6384
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32r - reversed operand order subtraction. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32r - reversed operand order division. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6415
6416
/** Opcode 0xd8.
 * First x87 escape opcode: dispatches on the ModR/M byte.  mod==3 selects the
 * register (ST0,STn) forms; otherwise the reg field selects the m32r memory
 * forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) - low 11 bits: last 3 bits of the opcode + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: ST0 op STn. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: ST0 op m32real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6454
6455
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - load a 32-bit real from memory, convert to r80 and push it.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST7's physical slot; it must be free or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6488
6489
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the QNaN indefinite instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6524
6525
/** Opcode 0xd9 !11/3
 * FSTP m32r - like iemOp_fst_m32r but pops the stack afterwards
 * (the *_THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the QNaN indefinite instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6560
6561
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; the layout depends on the
 * effective operand size, which is passed down to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6579
6580
6581/** Opcode 0xd9 !11/5 */
6582FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
6583{
6584 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
6585 IEM_MC_BEGIN(1, 1);
6586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6587 IEM_MC_ARG(uint16_t, u16Fsw, 0);
6588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6590 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6591 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6592 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6593 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596}
6597
6598
6599/** Opcode 0xd9 !11/6 */
6600FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
6601{
6602 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
6603 IEM_MC_BEGIN(3, 0);
6604 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
6605 IEM_MC_ARG(uint8_t, iEffSeg, 1);
6606 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
6607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6609 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6610 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6611 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6612 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
6613 IEM_MC_END();
6614 return VINF_SUCCESS;
6615}
6616
6617
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6635
6636
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does no computation but still checks CR0.TS/EM and pending FPU
 * exceptions, and updates FOP/FPUIP like other x87 instructions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6654
6655
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* copy STn's value as the result */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* source STn was empty */
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6683
6684
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's old value (with C1 set) becomes ST0, ST0's old value goes to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One or both registers empty; C implementation handles the underflow semantics. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6715
6716
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 into STn and pop; 'fstp st0' degenerates to a pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0  is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: nothing to copy, just pop (FSW untouched). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: ST0's value becomes the result stored in STn, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6763
6764
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6794
6795
/** Opcode 0xd9 0xe0.
 * FCHS - negate ST0 (unary worker dispatch). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS - absolute value of ST0 (unary worker dispatch). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
6810
6811
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (no register result stored).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word changes */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6840
6841
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against 0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5.
 * FXAM - classify ST0 into the condition code bits, flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
6856
6857
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      which produces the constant value.
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST7's physical slot; it must be free or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6885
6886
/** Opcode 0xd9 0xe8.
 * FLD1 - push +1.0 (constant-push worker dispatch). */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0.
 * F2XM1 - ST0 = 2^ST0 - 1 (unary worker dispatch). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
6947
6948
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Selects STn via the low three bits (callers may also
 *                      pass a plain register index, e.g. 1 for ST1).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is the first/destination operand here. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6980
6981
/** Opcode 0xd9 0xf1.
 * FYL2X - ST1 = ST1 * log2(ST0), then pop; dispatched with STn fixed to 1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
6988
6989
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      producing an IEMFPURESULTTWO.
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo); /* replaces ST0 and pushes the second value */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7019
7020
/** Opcode 0xd9 0xf2. FPTAN: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7027
7028
/** Opcode 0xd9 0xf3. FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7035
7036
/** Opcode 0xd9 0xf4. FXTRACT: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7043
7044
/** Opcode 0xd9 0xf5. FPREM1: ST0 = partial remainder of ST0 by ST1 (IEEE). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7051
7052
/** Opcode 0xd9 0xf6. FDECSTP: decrements the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0..C3 as per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7075
7076
/** Opcode 0xd9 0xf7. FINCSTP: increments the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0..C3 as per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7099
7100
/** Opcode 0xd9 0xf8. FPREM: ST0 = partial remainder of ST0 by ST1 (legacy). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7107
7108
/** Opcode 0xd9 0xf9. FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7115
7116
/** Opcode 0xd9 0xfa. FSQRT: ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7123
7124
/** Opcode 0xd9 0xfb. FSINCOS: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7131
7132
/** Opcode 0xd9 0xfc. FRNDINT: rounds ST0 to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7139
7140
/** Opcode 0xd9 0xfd. FSCALE: scales ST0 by ST1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7147
7148
/** Opcode 0xd9 0xfe. FSIN: ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7155
7156
/** Opcode 0xd9 0xff. FCOS: ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7163
7164
/** Used by iemOp_EscF1 to dispatch the 0xd9 register-form opcodes 0xe0..0xff.
 *  Index is (opcode byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7201
7202
/** Opcode 0xd9. Escape opcode; dispatches on the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: dispatch on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff go through the dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: dispatch on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7245
7246
/** Opcode 0xda 11/0. FCMOVB: copies ST(i) to ST0 when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7273
7274
/** Opcode 0xda 11/1. FCMOVE: copies ST(i) to ST0 when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7301
7302
/** Opcode 0xda 11/2. FCMOVBE: copies ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7329
7330
/** Opcode 0xda 11/3. FCMOVU: copies ST(i) to ST0 when PF (unordered) is set. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7357
7358
7359/**
7360 * Common worker for FPU instructions working on ST0 and STn, only affecting
7361 * flags, and popping twice when done.
7362 *
7363 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7364 */
7365FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7366{
7367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7368
7369 IEM_MC_BEGIN(3, 1);
7370 IEM_MC_LOCAL(uint16_t, u16Fsw);
7371 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7372 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7373 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7374
7375 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7376 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7377
7378 IEM_MC_PREPARE_FPU_USAGE();
7379 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
7380 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7381 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
7382 IEM_MC_ELSE()
7383 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
7384 IEM_MC_ENDIF();
7385 IEM_MC_ADVANCE_RIP();
7386
7387 IEM_MC_END();
7388 return VINF_SUCCESS;
7389}
7390
7391
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7398
7399
7400/**
7401 * Common worker for FPU instructions working on ST0 and an m32i, and storing
7402 * the result in ST0.
7403 *
7404 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7405 */
7406FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
7407{
7408 IEM_MC_BEGIN(3, 3);
7409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7410 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7411 IEM_MC_LOCAL(int32_t, i32Val2);
7412 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7413 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7414 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
7415
7416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7418
7419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7420 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7421 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7422
7423 IEM_MC_PREPARE_FPU_USAGE();
7424 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7425 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
7426 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7427 IEM_MC_ELSE()
7428 IEM_MC_FPU_STACK_UNDERFLOW(0);
7429 IEM_MC_ENDIF();
7430 IEM_MC_ADVANCE_RIP();
7431
7432 IEM_MC_END();
7433 return VINF_SUCCESS;
7434}
7435
7436
/** Opcode 0xda !11/0. FIADD: ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
7443
7444
/** Opcode 0xda !11/1. FIMUL: ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
7451
7452
/** Opcode 0xda !11/2. FICOM: compares ST0 with m32i, updates FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must be non-empty, else stack underflow (FSW records the mem op). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7485
7486
/** Opcode 0xda !11/3. FICOMP: compares ST0 with m32i, updates FSW, then pops. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same worker as FICOM; only the pop-after-update differs. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7519
7520
/** Opcode 0xda !11/4. FISUB: ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
7527
7528
/** Opcode 0xda !11/5. FISUBR: ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
7535
7536
/** Opcode 0xda !11/6. FIDIV: ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
7543
7544
/** Opcode 0xda !11/7. FIDIVR: ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
7551
7552
/** Opcode 0xda. Escape opcode; dispatches on the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: FCMOVcc plus FUCOMPP at 0xe9. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: integer (m32i) arithmetic and comparison. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7592
7593
/** Opcode 0xdb !11/0. FILD: pushes m32i onto the FPU stack as an r80. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be empty for the push, else overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7625
7626
/** Opcode 0xdb !11/1. FISTTP: stores ST0 to m32i with truncation, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7661
7662
/** Opcode 0xdb !11/2. FIST: stores ST0 to m32i (FCW rounding), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7697
7698
/** Opcode 0xdb !11/3. FISTP: stores ST0 to m32i (FCW rounding), then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7733
7734
/** Opcode 0xdb !11/5. FLD: pushes an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be empty for the push, else overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7766
7767
/** Opcode 0xdb !11/7. FSTP: stores ST0 to m80r, then pops. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7802
7803
/** Opcode 0xdb 11/0. FCMOVNB: copies ST(i) to ST0 when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7830
7831
/** Opcode 0xdb 11/1. FCMOVNE: copies ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7858
7859
/** Opcode 0xdb 11/2. FCMOVNBE: copies ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7886
7887
7888/** Opcode 0xdb 11/3. */
7889FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
7890{
7891 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
7892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7893
7894 IEM_MC_BEGIN(0, 1);
7895 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
7896
7897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7899
7900 IEM_MC_PREPARE_FPU_USAGE();
7901 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
7902 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
7903 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
7904 IEM_MC_ENDIF();
7905 IEM_MC_UPDATE_FPU_OPCODE_IP();
7906 IEM_MC_ELSE()
7907 IEM_MC_FPU_STACK_UNDERFLOW(0);
7908 IEM_MC_ENDIF();
7909 IEM_MC_ADVANCE_RIP();
7910
7911 IEM_MC_END();
7912 return VINF_SUCCESS;
7913}
7914
7915
/** Opcode 0xdb 0xe0. FNENI: 8087 enable-interrupts; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7927
7928
/** Opcode 0xdb 0xe1. FNDISI: 8087 disable-interrupts; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7940
7941
/** Opcode 0xdb 0xe2. FNCLEX: clears the FSW exception bits. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7956
7957
/** Opcode 0xdb 0xe3. FNINIT: reinitializes the FPU (deferred to C impl). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
7965
7966
/** Opcode 0xdb 0xe4. FNSETPM: 80287 set-protected-mode; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7978
7979
/** Opcode 0xdb 0xe5. FRSTPM: 80287XL reset-protected-mode; \#UD on newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
7995
7996
/** Opcode 0xdb 11/5. FUCOMI: unordered compare ST0 with ST(i), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8003
8004
/** Opcode 0xdb 11/6. FCOMI: compares ST0 with ST(i), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8011
8012
/** Opcode 0xdb. Escape opcode; dispatches on the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: FCMOVNcc, control ops (0xe0..0xe7), FUCOMI/FCOMI. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer loads/stores and 80-bit real load/store. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8062
8063
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only perform the operation if both STn and ST0 are occupied;
       otherwise signal stack underflow on STn. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8095
8096
/** Opcode 0xdc 11/0.  FADD ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8103
8104
/** Opcode 0xdc 11/1.  FMUL ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8111
8112
/** Opcode 0xdc 11/4.  FSUBR ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8119
8120
/** Opcode 0xdc 11/5.  FSUB ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8127
8128
/** Opcode 0xdc 11/6.  FDIVR ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8135
8136
/** Opcode 0xdc 11/7.  FDIV ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8143
8144
8145/**
8146 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8147 * memory operand, and storing the result in ST0.
8148 *
8149 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8150 */
8151FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8152{
8153 IEM_MC_BEGIN(3, 3);
8154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8155 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8156 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8157 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8158 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8159 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8160
8161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8163 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8164 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8165
8166 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8167 IEM_MC_PREPARE_FPU_USAGE();
8168 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8169 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8170 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8171 IEM_MC_ELSE()
8172 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8173 IEM_MC_ENDIF();
8174 IEM_MC_ADVANCE_RIP();
8175
8176 IEM_MC_END();
8177 return VINF_SUCCESS;
8178}
8179
8180
/** Opcode 0xdc !11/0.  FADD ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8187
8188
/** Opcode 0xdc !11/1.  FMUL ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8195
8196
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64r - compare ST0 with a 64-bit memory operand; only the FPU
 * status word is updated, no stack value is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Compare only if ST0 is occupied; otherwise signal stack underflow
       without touching any stack register (UINT8_MAX = no register). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8229
8230
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64r - like FCOM ST0,m64r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Same as FCOM m64r, except the FSW update / underflow variants pop ST0. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8263
8264
/** Opcode 0xdc !11/4.  FSUB ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8271
8272
/** Opcode 0xdc !11/5.  FSUBR ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8279
8280
/** Opcode 0xdc !11/6.  FDIV ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8287
8288
/** Opcode 0xdc !11/7.  FDIVR ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8295
8296
/** Opcode 0xdc.
 * Escape group F4 decoder: register forms (mod == 3) operate on ST(i) with
 * ST0 as second operand; memory forms take a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low 11 bits of the instruction for the FPU opcode (FOP) register. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8333
8334
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real memory operand onto the FPU stack, converted
 * to 80-bit format.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST7 (the register that becomes the new top) to be free;
       otherwise this is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8366
8367
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST0 to memory as a 64-bit integer with truncation,
 * then pop.  On masked invalid-operation with an empty ST0, the integer
 * indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so page faults happen before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store integer indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8402
8403
/** Opcode 0xdd !11/2.
 * FST m64r - store ST0 to memory as a 64-bit real, no pop.  On masked
 * invalid-operation with an empty ST0, a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so page faults happen before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store a negative QNaN if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8438
8439
8440
8441
/** Opcode 0xdd !11/3.
 * FSTP m64r - like FST m64r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so page faults happen before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store a negative QNaN if #IA is masked, then raise underflow (and pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8476
8477
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state from memory; the actual
 * work is deferred to the C implementation (iemCImpl_frstor). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8495
8496
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory; the actual work is
 * deferred to the C implementation (iemCImpl_fnsave). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
8515
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
8540
8541
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register as empty; does not alter the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8563
8564
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Copy ST0 if occupied; otherwise signal stack underflow on ST(i). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8589
8590
/** Opcode 0xdd 11/4.
 * FUCOM ST0,ST(i) - unordered compare updating the FPU status word, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
8597
8598
/** Opcode 0xdd 11/5.
 * FUCOMP ST0,ST(i) - unordered compare updating the FPU status word, pops. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
8605
8606
/** Opcode 0xdd.
 * Escape group F5 decoder: register forms are FFREE/FST/FSTP/FUCOM family;
 * memory forms take 64-bit real / integer operands and FPU state save/restore. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low 11 bits of the instruction for the FPU opcode (FOP) register. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8643
8644
/** Opcode 0xde 11/0.  FADDP ST(i),ST0 - add, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
8651
8652
/** Opcode 0xde 11/1.  FMULP ST(i),ST0 - multiply, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
8659
8660
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
8667
8668
/** Opcode 0xde 11/4.  FSUBRP ST(i),ST0 - reverse subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
8675
8676
/** Opcode 0xde 11/5.  FSUBP ST(i),ST0 - subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
8683
8684
/** Opcode 0xde 11/6.  FDIVRP ST(i),ST0 - reverse divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
8691
8692
/** Opcode 0xde 11/7.  FDIVP ST(i),ST0 - divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
8699
8700
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form); used to compute the
 *                      effective address of the m16i operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Only perform the operation if ST0 is occupied; otherwise signal
       stack underflow on ST0. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8736
8737
/** Opcode 0xde !11/0.  FIADD ST0,m16i - integer add, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
8744
8745
/** Opcode 0xde !11/1.  FIMUL ST0,m16i - integer multiply, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
8752
8753
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a 16-bit integer memory operand; only
 * the FPU status word is updated, no stack value is written. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Compare only if ST0 is occupied; otherwise signal stack underflow
       without touching any stack register (UINT8_MAX = no register). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8786
8787
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - like FICOM ST0,m16i but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Same as FICOM m16i, except the FSW update / underflow variants pop ST0. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8820
8821
/** Opcode 0xde !11/4.  FISUB ST0,m16i - integer subtract, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
8828
8829
/** Opcode 0xde !11/5.  FISUBR ST0,m16i - reverse integer subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
8836
8837
/** Opcode 0xde !11/6.  FIDIV ST0,m16i - integer divide, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
8844
8845
/** Opcode 0xde !11/7.  FIDIVR ST0,m16i - reverse integer divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
8852
8853
/** Opcode 0xde.
 * Escape group F6 decoder: register forms are the popping arithmetic family
 * (FADDP..FDIVP, FCOMPP); memory forms take 16-bit integer operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low 11 bits of the instruction for the FPU opcode (FOP) register. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only 0xd9 (FCOMPP) is defined in /3; the rest are invalid. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8892
8893
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE + FINCSTP (free the register, then increment the stack top). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8915
8916
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in the AX register. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8933
8934
8935/** Opcode 0xdf 11/5. */
8936FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
8937{
8938 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
8939 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
8940}
8941
8942
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8949
8950
/** Opcode 0xdf !11/0.
 * FILD m16i - push a 16-bit integer memory operand onto the FPU stack,
 * converted to 80-bit real format. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST7 (the register that becomes the new top) to be free;
       otherwise this is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8982
8983
8984/** Opcode 0xdf !11/1.  FISTTP m16int - store ST(0) as a 16-bit integer with
 * truncation (chop) regardless of the FCW rounding mode, then pop.
 * NOTE(review): no CPUID gating is visible here (FISTTP is an SSE3
 * addition); presumably handled elsewhere or deliberately permissive -
 * confirm. */
8985FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
8986{
8987 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
8988 IEM_MC_BEGIN(3, 2);
8989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8990 IEM_MC_LOCAL(uint16_t, u16Fsw);
8991 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8992 IEM_MC_ARG(int16_t *, pi16Dst, 1);
8993 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8994
8995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8997 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8998 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8999
 /* Map the destination before using the FPU so a memory fault is raised
 before any FPU state is modified. */
9000 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9001 IEM_MC_PREPARE_FPU_USAGE();
9002 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9003 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9004 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9005 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9006 IEM_MC_ELSE()
 /* Stack underflow: with #IA masked, store the integer indefinite
 value (INT16_MIN); otherwise leave the destination untouched. */
9007 IEM_MC_IF_FCW_IM()
9008 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9009 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9010 IEM_MC_ENDIF();
9011 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9012 IEM_MC_ENDIF();
9013 IEM_MC_ADVANCE_RIP();
9014
9015 IEM_MC_END();
9016 return VINF_SUCCESS;
9017}
9018
9019
9020/** Opcode 0xdf !11/2.  FIST m16int - store ST(0) as a 16-bit integer using
 * the FCW rounding mode; the stack is NOT popped (contrast the _THEN_POP
 * variants in FISTTP/FISTP around this function). */
9021FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
9022{
9023 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
9024 IEM_MC_BEGIN(3, 2);
9025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9026 IEM_MC_LOCAL(uint16_t, u16Fsw);
9027 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9028 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9029 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9030
9031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9033 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9034 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9035
 /* Map the destination before using the FPU so a memory fault is raised
 before any FPU state is modified. */
9036 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9037 IEM_MC_PREPARE_FPU_USAGE();
9038 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9039 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9040 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9041 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9042 IEM_MC_ELSE()
 /* Stack underflow: with #IA masked, store the integer indefinite
 value (INT16_MIN); otherwise leave the destination untouched. */
9043 IEM_MC_IF_FCW_IM()
9044 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9045 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9046 IEM_MC_ENDIF();
9047 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9048 IEM_MC_ENDIF();
9049 IEM_MC_ADVANCE_RIP();
9050
9051 IEM_MC_END();
9052 return VINF_SUCCESS;
9053}
9054
9055
9056/** Opcode 0xdf !11/3.  FISTP m16int - store ST(0) as a 16-bit integer using
 * the FCW rounding mode, then pop the FPU stack. */
9057FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
9058{
9059 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
9060 IEM_MC_BEGIN(3, 2);
9061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9062 IEM_MC_LOCAL(uint16_t, u16Fsw);
9063 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9064 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9065 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9066
9067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9069 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9070 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9071
 /* Map the destination before using the FPU so a memory fault is raised
 before any FPU state is modified. */
9072 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9073 IEM_MC_PREPARE_FPU_USAGE();
9074 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9075 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9076 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9077 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9078 IEM_MC_ELSE()
 /* Stack underflow: with #IA masked, store the integer indefinite
 value (INT16_MIN); otherwise leave the destination untouched. */
9079 IEM_MC_IF_FCW_IM()
9080 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9081 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9082 IEM_MC_ENDIF();
9083 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9084 IEM_MC_ENDIF();
9085 IEM_MC_ADVANCE_RIP();
9086
9087 IEM_MC_END();
9088 return VINF_SUCCESS;
9089}
9090
9091
9092/** Opcode 0xdf !11/4.  FBLD m80bcd - load packed BCD. Not implemented yet
 * (stub raises the usual not-implemented status). */
9093FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9094
9095
9096/** Opcode 0xdf !11/5.  FILD m64int - load a signed 64-bit integer from
 * memory, convert it to an 80-bit real and push it onto the FPU stack. */
9097FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
9098{
9099 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
9100
9101 IEM_MC_BEGIN(2, 3);
9102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9103 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9104 IEM_MC_LOCAL(int64_t, i64Val);
9105 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9106 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
9107
9108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9110
9111 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9112 IEM_MC_MAYBE_RAISE_FPU_XCPT();
 /* Fetch the source operand before touching the FPU stack so memory
 faults are raised without any FPU state change. */
9113 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9114
9115 IEM_MC_PREPARE_FPU_USAGE();
 /* ST(7) relative to the current top is the slot the push lands in;
 if it is occupied we have a stack overflow instead. */
9116 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9117 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
9118 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9119 IEM_MC_ELSE()
9120 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9121 IEM_MC_ENDIF();
9122 IEM_MC_ADVANCE_RIP();
9123
9124 IEM_MC_END();
9125 return VINF_SUCCESS;
9126}
9127
9128
9129/** Opcode 0xdf !11/6.  FBSTP m80bcd - store packed BCD and pop. Not
 * implemented yet (stub raises the usual not-implemented status). */
9130FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9131
9132
9133/** Opcode 0xdf !11/7.  FISTP m64int - store ST(0) as a 64-bit integer using
 * the FCW rounding mode, then pop the FPU stack. */
9134FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
9135{
9136 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
9137 IEM_MC_BEGIN(3, 2);
9138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9139 IEM_MC_LOCAL(uint16_t, u16Fsw);
9140 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9141 IEM_MC_ARG(int64_t *, pi64Dst, 1);
9142 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9143
9144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9146 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9147 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9148
 /* Map the destination before using the FPU so a memory fault is raised
 before any FPU state is modified. */
9149 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9150 IEM_MC_PREPARE_FPU_USAGE();
9151 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9152 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
9153 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9154 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9155 IEM_MC_ELSE()
 /* Stack underflow: with #IA masked, store the integer indefinite
 value (INT64_MIN); otherwise leave the destination untouched. */
9156 IEM_MC_IF_FCW_IM()
9157 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
9158 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
9159 IEM_MC_ENDIF();
9160 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9161 IEM_MC_ENDIF();
9162 IEM_MC_ADVANCE_RIP();
9163
9164 IEM_MC_END();
9165 return VINF_SUCCESS;
9166}
9167
9168
9169/** Opcode 0xdf.  Escape group F7 dispatcher: decodes the ModR/M byte and
 * forwards to the register forms (mod=3, with the bRm==0xe0 special case for
 * FNSTSW AX) or to the memory forms (integer loads/stores and BCD stubs). */
9170FNIEMOP_DEF(iemOp_EscF7)
9171{
9172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9174 {
 /* Register operand forms, selected by the reg field. */
9175 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9176 {
9177 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9178 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9179 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9180 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
 /* /4 is only defined for the exact encoding 0xe0 (FNSTSW AX). */
9181 case 4: if (bRm == 0xe0)
9182 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9183 return IEMOP_RAISE_INVALID_OPCODE();
9184 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9185 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9186 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9188 }
9189 }
9190 else
9191 {
 /* Memory operand forms, selected by the reg field. */
9192 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9193 {
9194 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9195 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9196 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9197 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9198 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9199 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9200 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9201 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9203 }
9204 }
9205}
9206
9207
9208/** Opcode 0xe0.  LOOPNE/LOOPNZ Jb - decrement the counter register and take
 * the short jump if the counter is non-zero AND ZF is clear.  The effective
 * address size selects which counter is used (CX/ECX/RCX). */
9209FNIEMOP_DEF(iemOp_loopne_Jb)
9210{
9211 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9212 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9214 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9215
9216 switch (pVCpu->iem.s.enmEffAddrMode)
9217 {
9218 case IEMMODE_16BIT: /* counter is CX */
9219 IEM_MC_BEGIN(0,0);
9220 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9221 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9222 IEM_MC_REL_JMP_S8(i8Imm);
9223 } IEM_MC_ELSE() {
9224 IEM_MC_ADVANCE_RIP();
9225 } IEM_MC_ENDIF();
9226 IEM_MC_END();
9227 return VINF_SUCCESS;
9228
9229 case IEMMODE_32BIT: /* counter is ECX */
9230 IEM_MC_BEGIN(0,0);
9231 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9232 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9233 IEM_MC_REL_JMP_S8(i8Imm);
9234 } IEM_MC_ELSE() {
9235 IEM_MC_ADVANCE_RIP();
9236 } IEM_MC_ENDIF();
9237 IEM_MC_END();
9238 return VINF_SUCCESS;
9239
9240 case IEMMODE_64BIT: /* counter is RCX */
9241 IEM_MC_BEGIN(0,0);
9242 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9243 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9244 IEM_MC_REL_JMP_S8(i8Imm);
9245 } IEM_MC_ELSE() {
9246 IEM_MC_ADVANCE_RIP();
9247 } IEM_MC_ENDIF();
9248 IEM_MC_END();
9249 return VINF_SUCCESS;
9250
9251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9252 }
9253}
9254
9255
9256/** Opcode 0xe1.  LOOPE/LOOPZ Jb - decrement the counter register and take
 * the short jump if the counter is non-zero AND ZF is set.  The effective
 * address size selects which counter is used (CX/ECX/RCX). */
9257FNIEMOP_DEF(iemOp_loope_Jb)
9258{
9259 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9260 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9262 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9263
9264 switch (pVCpu->iem.s.enmEffAddrMode)
9265 {
9266 case IEMMODE_16BIT: /* counter is CX */
9267 IEM_MC_BEGIN(0,0);
9268 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9269 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9270 IEM_MC_REL_JMP_S8(i8Imm);
9271 } IEM_MC_ELSE() {
9272 IEM_MC_ADVANCE_RIP();
9273 } IEM_MC_ENDIF();
9274 IEM_MC_END();
9275 return VINF_SUCCESS;
9276
9277 case IEMMODE_32BIT: /* counter is ECX */
9278 IEM_MC_BEGIN(0,0);
9279 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9280 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9281 IEM_MC_REL_JMP_S8(i8Imm);
9282 } IEM_MC_ELSE() {
9283 IEM_MC_ADVANCE_RIP();
9284 } IEM_MC_ENDIF();
9285 IEM_MC_END();
9286 return VINF_SUCCESS;
9287
9288 case IEMMODE_64BIT: /* counter is RCX */
9289 IEM_MC_BEGIN(0,0);
9290 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9291 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9292 IEM_MC_REL_JMP_S8(i8Imm);
9293 } IEM_MC_ELSE() {
9294 IEM_MC_ADVANCE_RIP();
9295 } IEM_MC_ENDIF();
9296 IEM_MC_END();
9297 return VINF_SUCCESS;
9298
9299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9300 }
9301}
9302
9303
9304/** Opcode 0xe2.  LOOP Jb - decrement the counter register and take the
 * short jump while the counter is non-zero.  Includes a shortcut for the
 * self-branching encoding (jump target == this instruction), which would
 * otherwise spin the counter down one emulated iteration at a time. */
9305FNIEMOP_DEF(iemOp_loop_Jb)
9306{
9307 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
9308 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9310 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9311
9312 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
9313 * using the 32-bit operand size override. How can that be restarted? See
9314 * weird pseudo code in intel manual. */
9315 switch (pVCpu->iem.s.enmEffAddrMode)
9316 {
9317 case IEMMODE_16BIT:
9318 IEM_MC_BEGIN(0,0);
 /* A displacement of minus the instruction length means the loop
 jumps to itself; that degenerates to zeroing the counter. */
9319 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9320 {
9321 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9322 IEM_MC_IF_CX_IS_NZ() {
9323 IEM_MC_REL_JMP_S8(i8Imm);
9324 } IEM_MC_ELSE() {
9325 IEM_MC_ADVANCE_RIP();
9326 } IEM_MC_ENDIF();
9327 }
9328 else
9329 {
9330 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
9331 IEM_MC_ADVANCE_RIP();
9332 }
9333 IEM_MC_END();
9334 return VINF_SUCCESS;
9335
9336 case IEMMODE_32BIT:
9337 IEM_MC_BEGIN(0,0);
9338 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9339 {
9340 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9341 IEM_MC_IF_ECX_IS_NZ() {
9342 IEM_MC_REL_JMP_S8(i8Imm);
9343 } IEM_MC_ELSE() {
9344 IEM_MC_ADVANCE_RIP();
9345 } IEM_MC_ENDIF();
9346 }
9347 else
9348 {
9349 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
9350 IEM_MC_ADVANCE_RIP();
9351 }
9352 IEM_MC_END();
9353 return VINF_SUCCESS;
9354
9355 case IEMMODE_64BIT:
9356 IEM_MC_BEGIN(0,0);
9357 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9358 {
9359 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9360 IEM_MC_IF_RCX_IS_NZ() {
9361 IEM_MC_REL_JMP_S8(i8Imm);
9362 } IEM_MC_ELSE() {
9363 IEM_MC_ADVANCE_RIP();
9364 } IEM_MC_ENDIF();
9365 }
9366 else
9367 {
9368 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
9369 IEM_MC_ADVANCE_RIP();
9370 }
9371 IEM_MC_END();
9372 return VINF_SUCCESS;
9373
9374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9375 }
9376}
9377
9378
9379/** Opcode 0xe3.  JCXZ/JECXZ/JRCXZ Jb - take the short jump when the counter
 * register is zero.  No decrement here; note the inverted branch sense
 * compared to the LOOP family above. */
9380FNIEMOP_DEF(iemOp_jecxz_Jb)
9381{
9382 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
9383 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9385 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9386
9387 switch (pVCpu->iem.s.enmEffAddrMode)
9388 {
9389 case IEMMODE_16BIT: /* JCXZ */
9390 IEM_MC_BEGIN(0,0);
9391 IEM_MC_IF_CX_IS_NZ() {
9392 IEM_MC_ADVANCE_RIP();
9393 } IEM_MC_ELSE() {
9394 IEM_MC_REL_JMP_S8(i8Imm);
9395 } IEM_MC_ENDIF();
9396 IEM_MC_END();
9397 return VINF_SUCCESS;
9398
9399 case IEMMODE_32BIT: /* JECXZ */
9400 IEM_MC_BEGIN(0,0);
9401 IEM_MC_IF_ECX_IS_NZ() {
9402 IEM_MC_ADVANCE_RIP();
9403 } IEM_MC_ELSE() {
9404 IEM_MC_REL_JMP_S8(i8Imm);
9405 } IEM_MC_ENDIF();
9406 IEM_MC_END();
9407 return VINF_SUCCESS;
9408
9409 case IEMMODE_64BIT: /* JRCXZ */
9410 IEM_MC_BEGIN(0,0);
9411 IEM_MC_IF_RCX_IS_NZ() {
9412 IEM_MC_ADVANCE_RIP();
9413 } IEM_MC_ELSE() {
9414 IEM_MC_REL_JMP_S8(i8Imm);
9415 } IEM_MC_ENDIF();
9416 IEM_MC_END();
9417 return VINF_SUCCESS;
9418
9419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9420 }
9421}
9422
9423
9424/** Opcode 0xe4.  IN AL,Ib - read one byte from the immediate I/O port into
 * AL; deferred to the common 'in' C implementation (access size 1). */
9425FNIEMOP_DEF(iemOp_in_AL_Ib)
9426{
9427 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
9428 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9430 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
9431}
9432
9433
9434/** Opcode 0xe5.  IN eAX,Ib - read a word/dword from the immediate I/O port
 * into AX/EAX; the access size (2 or 4) follows the operand size. */
9435FNIEMOP_DEF(iemOp_in_eAX_Ib)
9436{
9437 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
9438 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9440 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9441}
9442
9443
9444/** Opcode 0xe6.  OUT Ib,AL - write AL to the immediate I/O port; deferred
 * to the common 'out' C implementation (access size 1). */
9445FNIEMOP_DEF(iemOp_out_Ib_AL)
9446{
9447 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
9448 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9450 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
9451}
9452
9453
9454/** Opcode 0xe7.  OUT Ib,eAX - write AX/EAX to the immediate I/O port; the
 * access size (2 or 4) follows the operand size. */
9455FNIEMOP_DEF(iemOp_out_Ib_eAX)
9456{
9457 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
9458 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9460 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9461}
9462
9463
9464/** Opcode 0xe8.  CALL Jv - relative near call.  The immediate width follows
 * the operand size; in 64-bit mode the immediate is 32 bits sign-extended
 * to 64 (and the default operand size is 64-bit). */
9465FNIEMOP_DEF(iemOp_call_Jv)
9466{
9467 IEMOP_MNEMONIC(call_Jv, "call Jv");
9468 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9469 switch (pVCpu->iem.s.enmEffOpSize)
9470 {
9471 case IEMMODE_16BIT:
9472 {
9473 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
 /* The displacement is signed; reinterpret before handing it on. */
9474 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
9475 }
9476
9477 case IEMMODE_32BIT:
9478 {
9479 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9480 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
9481 }
9482
9483 case IEMMODE_64BIT:
9484 {
 /* 32-bit immediate sign-extended to 64 bits while fetching. */
9485 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9486 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
9487 }
9488
9489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9490 }
9491}
9492
9493
9493/** Opcode 0xe9.  JMP Jv - relative near jump.  16-bit operand size uses a
 * 16-bit displacement; 32-bit and 64-bit modes share the 32-bit
 * displacement path (sign-extended when applied in 64-bit mode). */
9495FNIEMOP_DEF(iemOp_jmp_Jv)
9496{
9497 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
9498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9499 switch (pVCpu->iem.s.enmEffOpSize)
9500 {
9501 case IEMMODE_16BIT:
9502 {
9503 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
9504 IEM_MC_BEGIN(0, 0);
9505 IEM_MC_REL_JMP_S16(i16Imm);
9506 IEM_MC_END();
9507 return VINF_SUCCESS;
9508 }
9509
 /* 64-bit mode also uses the 32-bit displacement encoding. */
9510 case IEMMODE_64BIT:
9511 case IEMMODE_32BIT:
9512 {
9513 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
9514 IEM_MC_BEGIN(0, 0);
9515 IEM_MC_REL_JMP_S32(i32Imm);
9516 IEM_MC_END();
9517 return VINF_SUCCESS;
9518 }
9519
9520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9521 }
9522}
9523
9524
9525/** Opcode 0xea.  JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid
 * in 64-bit mode (IEMOP_HLP_NO_64BIT). */
9526FNIEMOP_DEF(iemOp_jmp_Ap)
9527{
9528 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
9529 IEMOP_HLP_NO_64BIT();
9530
9531 /* Decode the far pointer address and pass it on to the far call C implementation. */
9532 uint32_t offSeg;
9533 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
9534 IEM_OPCODE_GET_NEXT_U32(&offSeg);
9535 else
9536 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
 /* The selector word follows the offset in the instruction stream. */
9537 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
9538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9539 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
9540}
9541
9542
9543/** Opcode 0xeb.  JMP Jb - short relative jump with an 8-bit signed
 * displacement. */
9544FNIEMOP_DEF(iemOp_jmp_Jb)
9545{
9546 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
9547 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9550
9551 IEM_MC_BEGIN(0, 0);
9552 IEM_MC_REL_JMP_S8(i8Imm);
9553 IEM_MC_END();
9554 return VINF_SUCCESS;
9555}
9556
9557
9558/** Opcode 0xec.  IN AL,DX - read one byte from the I/O port in DX into AL. */
9559FNIEMOP_DEF(iemOp_in_AL_DX)
9560{
9561 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
9562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9563 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
9564}
9565
9566
9567/** Opcode 0xed.  IN eAX,DX - read a word/dword from the I/O port in DX into
 * AX/EAX; the access size (2 or 4) follows the operand size.
 * NOTE(review): the function name lacks the 'in_' prefix used by its
 * siblings (iemOp_in_AL_DX etc.); renaming would require updating the
 * opcode table reference - cosmetic only. */
9568FNIEMOP_DEF(iemOp_eAX_DX)
9569{
9570 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
9571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9572 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9573}
9574
9575
9576/** Opcode 0xee.  OUT DX,AL - write AL to the I/O port in DX. */
9577FNIEMOP_DEF(iemOp_out_DX_AL)
9578{
9579 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
9580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9581 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
9582}
9583
9584
9585/** Opcode 0xef.  OUT DX,eAX - write AX/EAX to the I/O port in DX; the
 * access size (2 or 4) follows the operand size. */
9586FNIEMOP_DEF(iemOp_out_DX_eAX)
9587{
9588 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
9589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9590 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9591}
9592
9593
9594/** Opcode 0xf0.  LOCK prefix - records the prefix flag and recursively
 * decodes the next opcode byte through the one-byte table. */
9595FNIEMOP_DEF(iemOp_lock)
9596{
9597 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
9598 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
9599
9600 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9601 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9602}
9603
9604
9605/** Opcode 0xf1.  INT1/ICEBP - raises vector 1 (#DB) via the common software
 * interrupt implementation; fIsBpInstr=false so it is not treated as the
 * INT3 special case. */
9606FNIEMOP_DEF(iemOp_int_1)
9607{
9608 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
9609 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
9610 /** @todo testcase! */
9611 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
9612}
9613
9614
9615/** Opcode 0xf2.  REPNE/REPNZ prefix - records the prefix flag and
 * recursively decodes the next opcode byte. */
9616FNIEMOP_DEF(iemOp_repne)
9617{
9618 /* This overrides any previous REPE prefix. */
9619 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
9620 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
9621 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
9622
9623 /* For the 4 entry opcode tables, REPNZ overrides any previous
9624 REPZ and operand size prefixes. */
 /* idxPrefix 3 selects the 0xf2 column of the 4-entry tables. */
9625 pVCpu->iem.s.idxPrefix = 3;
9626
9627 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9628 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9629}
9630
9631
9632/** Opcode 0xf3.  REPE/REPZ prefix - records the prefix flag and recursively
 * decodes the next opcode byte. */
9633FNIEMOP_DEF(iemOp_repe)
9634{
9635 /* This overrides any previous REPNE prefix. */
9636 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
9637 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
9638 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
9639
9640 /* For the 4 entry opcode tables, REPZ overrides any previous
9641 REPNZ and operand size prefixes. */
 /* idxPrefix 2 selects the 0xf3 column of the 4-entry tables. */
9642 pVCpu->iem.s.idxPrefix = 2;
9643
9644 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9645 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9646}
9647
9648
9649/** Opcode 0xf4.  HLT - halt the CPU; deferred to the C implementation
 * (privilege checking and the actual halting happen there). */
9650FNIEMOP_DEF(iemOp_hlt)
9651{
 /* Added for consistency: every sibling opcode handler in this file
 registers its mnemonic; hlt was the lone exception. */
 IEMOP_MNEMONIC(hlt, "hlt");
9652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9653 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
9654}
9655
9656
9657/** Opcode 0xf5.  CMC - complement the carry flag; no other EFLAGS bits are
 * touched. */
9658FNIEMOP_DEF(iemOp_cmc)
9659{
9660 IEMOP_MNEMONIC(cmc, "cmc");
9661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9662 IEM_MC_BEGIN(0, 0);
9663 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
9664 IEM_MC_ADVANCE_RIP();
9665 IEM_MC_END();
9666 return VINF_SUCCESS;
9667}
9668
9669
9670/**
9671 * Common implementation of 'inc/dec/not/neg Eb'.
9672 *
9673 * Handles both the register form (direct register reference, no LOCK) and
9674 * the memory form (mapped read-write; the locked helper is used when a
9675 * LOCK prefix is present).
9676 *
9673 * @param bRm The RM byte.
9674 * @param pImpl The instruction implementation.
9675 */
9676FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9677{
9678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9679 {
9680 /* register access */
9681 IEM_MC_BEGIN(2, 0);
9682 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9683 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9684 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9685 IEM_MC_REF_EFLAGS(pEFlags);
9686 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9687 IEM_MC_ADVANCE_RIP();
9688 IEM_MC_END();
9689 }
9690 else
9691 {
9692 /* memory access. */
9693 IEM_MC_BEGIN(2, 2);
9694 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9695 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9697
9698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9699 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9700 IEM_MC_FETCH_EFLAGS(EFlags);
 /* Pick the atomic helper when a LOCK prefix is in effect. */
9701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9702 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9703 else
9704 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
9705
9706 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9707 IEM_MC_COMMIT_EFLAGS(EFlags);
9708 IEM_MC_ADVANCE_RIP();
9709 IEM_MC_END();
9710 }
9711 return VINF_SUCCESS;
9712}
9713
9714
9715/**
9716 * Common implementation of 'inc/dec/not/neg Ev'.
9717 *
9718 * Register operands are delegated to iemOpCommonUnaryGReg; the memory
9719 * forms are emitted here per operand size, using the locked helper when a
9720 * LOCK prefix is present.
9721 *
9718 * @param bRm The RM byte.
9719 * @param pImpl The instruction implementation.
9720 */
9721FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9722{
9723 /* Registers are handled by a common worker. */
9724 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9725 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9726
9727 /* Memory we do here. */
9728 switch (pVCpu->iem.s.enmEffOpSize)
9729 {
9730 case IEMMODE_16BIT:
9731 IEM_MC_BEGIN(2, 2);
9732 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9733 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9735
9736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9737 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9738 IEM_MC_FETCH_EFLAGS(EFlags);
9739 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9740 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9741 else
9742 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
9743
9744 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9745 IEM_MC_COMMIT_EFLAGS(EFlags);
9746 IEM_MC_ADVANCE_RIP();
9747 IEM_MC_END();
9748 return VINF_SUCCESS;
9749
9750 case IEMMODE_32BIT:
9751 IEM_MC_BEGIN(2, 2);
9752 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9753 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9755
9756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9757 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9758 IEM_MC_FETCH_EFLAGS(EFlags);
9759 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9760 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
9761 else
9762 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
9763
9764 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9765 IEM_MC_COMMIT_EFLAGS(EFlags);
9766 IEM_MC_ADVANCE_RIP();
9767 IEM_MC_END();
9768 return VINF_SUCCESS;
9769
9770 case IEMMODE_64BIT:
9771 IEM_MC_BEGIN(2, 2);
9772 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9773 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9775
9776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9777 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9778 IEM_MC_FETCH_EFLAGS(EFlags);
9779 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9780 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9781 else
9782 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
9783
9784 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9785 IEM_MC_COMMIT_EFLAGS(EFlags);
9786 IEM_MC_ADVANCE_RIP();
9787 IEM_MC_END();
9788 return VINF_SUCCESS;
9789
9790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9791 }
9792}
9793
9794
9795/** Opcode 0xf6 /0.  TEST Eb,Ib - AND without writeback, only EFLAGS are
 * updated (AF is left undefined, see the verification macro below).  LOCK
 * is not allowed. */
9796FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
9797{
9798 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
9799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
9800
9801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9802 {
9803 /* register access */
9804 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9806
9807 IEM_MC_BEGIN(3, 0);
9808 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9809 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
9810 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9811 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9812 IEM_MC_REF_EFLAGS(pEFlags);
9813 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
9814 IEM_MC_ADVANCE_RIP();
9815 IEM_MC_END();
9816 }
9817 else
9818 {
9819 /* memory access. */
9820 IEM_MC_BEGIN(3, 2);
9821 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9822 IEM_MC_ARG(uint8_t, u8Src, 1);
9823 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9825
 /* One immediate byte follows the ModR/M operand bytes. */
9826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9827 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9828 IEM_MC_ASSIGN(u8Src, u8Imm);
9829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Read-only mapping - TEST never writes the operand back. */
9830 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9831 IEM_MC_FETCH_EFLAGS(EFlags);
9832 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
9833
9834 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
9835 IEM_MC_COMMIT_EFLAGS(EFlags);
9836 IEM_MC_ADVANCE_RIP();
9837 IEM_MC_END();
9838 }
9839 return VINF_SUCCESS;
9840}
9841
9842
9843/** Opcode 0xf7 /0.  TEST Ev,Iv - AND without writeback per operand size;
 * only EFLAGS are updated (AF undefined).  The 64-bit form uses a 32-bit
 * immediate sign-extended to 64 bits.  LOCK is not allowed. */
9844FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
9845{
9846 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
9847 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
9848
9849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9850 {
9851 /* register access */
9852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9853 switch (pVCpu->iem.s.enmEffOpSize)
9854 {
9855 case IEMMODE_16BIT:
9856 {
9857 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9858 IEM_MC_BEGIN(3, 0);
9859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9860 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
9861 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9862 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9863 IEM_MC_REF_EFLAGS(pEFlags);
9864 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
9865 IEM_MC_ADVANCE_RIP();
9866 IEM_MC_END();
9867 return VINF_SUCCESS;
9868 }
9869
9870 case IEMMODE_32BIT:
9871 {
9872 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9873 IEM_MC_BEGIN(3, 0);
9874 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9875 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
9876 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9877 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9878 IEM_MC_REF_EFLAGS(pEFlags);
9879 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
9880 /* No clearing the high dword here - test doesn't write back the result. */
9881 IEM_MC_ADVANCE_RIP();
9882 IEM_MC_END();
9883 return VINF_SUCCESS;
9884 }
9885
9886 case IEMMODE_64BIT:
9887 {
 /* 32-bit immediate sign-extended to 64 bits while fetching. */
9888 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9889 IEM_MC_BEGIN(3, 0);
9890 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9891 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
9892 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9893 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9894 IEM_MC_REF_EFLAGS(pEFlags);
9895 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
9896 IEM_MC_ADVANCE_RIP();
9897 IEM_MC_END();
9898 return VINF_SUCCESS;
9899 }
9900
9901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9902 }
9903 }
9904 else
9905 {
9906 /* memory access. */
9907 switch (pVCpu->iem.s.enmEffOpSize)
9908 {
9909 case IEMMODE_16BIT:
9910 {
9911 IEM_MC_BEGIN(3, 2);
9912 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9913 IEM_MC_ARG(uint16_t, u16Src, 1);
9914 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9916
 /* Two immediate bytes follow the ModR/M operand bytes. */
9917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9918 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9919 IEM_MC_ASSIGN(u16Src, u16Imm);
9920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Read-only mapping - TEST never writes the operand back. */
9921 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9922 IEM_MC_FETCH_EFLAGS(EFlags);
9923 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
9924
9925 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
9926 IEM_MC_COMMIT_EFLAGS(EFlags);
9927 IEM_MC_ADVANCE_RIP();
9928 IEM_MC_END();
9929 return VINF_SUCCESS;
9930 }
9931
9932 case IEMMODE_32BIT:
9933 {
9934 IEM_MC_BEGIN(3, 2);
9935 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9936 IEM_MC_ARG(uint32_t, u32Src, 1);
9937 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9939
 /* Four immediate bytes follow the ModR/M operand bytes. */
9940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9941 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9942 IEM_MC_ASSIGN(u32Src, u32Imm);
9943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9944 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9945 IEM_MC_FETCH_EFLAGS(EFlags);
9946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
9947
9948 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
9949 IEM_MC_COMMIT_EFLAGS(EFlags);
9950 IEM_MC_ADVANCE_RIP();
9951 IEM_MC_END();
9952 return VINF_SUCCESS;
9953 }
9954
9955 case IEMMODE_64BIT:
9956 {
9957 IEM_MC_BEGIN(3, 2);
9958 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9959 IEM_MC_ARG(uint64_t, u64Src, 1);
9960 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9962
 /* Still only four immediate bytes in 64-bit mode (Iz, not Iq). */
9963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9964 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9965 IEM_MC_ASSIGN(u64Src, u64Imm);
9966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9967 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9968 IEM_MC_FETCH_EFLAGS(EFlags);
9969 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
9970
9971 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
9972 IEM_MC_COMMIT_EFLAGS(EFlags);
9973 IEM_MC_ADVANCE_RIP();
9974 IEM_MC_END();
9975 return VINF_SUCCESS;
9976 }
9977
9978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9979 }
9980 }
9981}
9982
9983
/**
 * Common worker for group 3 byte instructions 0xf6 /4, /5, /6 and /7
 * (mul/imul/div/idiv Eb).
 *
 * Fetches the 8-bit operand from a register or from memory, applies the
 * operation to AX via the supplied assembly worker, and raises \#DE when the
 * worker reports failure (non-zero return).
 *
 * @param   bRm     The ModRM byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly implementation; returns
 *                  0 on success, non-zero to raise a divide error.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero status from the assembly worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10035
10036
/**
 * Common worker for group 3 instructions 0xf7 /4, /5, /6 and /7
 * (mul/imul/div/idiv Ev).
 *
 * Fetches the operand-size value from a register or from memory, applies the
 * operation to the {r,e}DX:{r,e}AX register pair via the size-appropriate
 * assembly worker, and raises \#DE when the worker reports failure (non-zero
 * return).  The 32-bit register path explicitly clears the high halves of the
 * 64-bit destination registers on success.
 *
 * @param   bRm     The ModRM byte.
 * @param   pImpl   Table of the 16/32/64-bit assembly implementations; each
 *                  returns 0 on success, non-zero to raise a divide error.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        /* NOTE(review): this macro is invoked again inside each case below;
           looks like a redundant leftover but appears harmless — confirm. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to GPRs zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to GPRs zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10220
/**
 * Opcode 0xf6 - group 3 byte operations.
 *
 * Dispatches on ModRM.reg: /0 test, /1 undefined, /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv (all with an Eb operand).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10257
10258
/**
 * Opcode 0xf7 - group 3 word/dword/qword operations.
 *
 * Dispatches on ModRM.reg: /0 test, /1 undefined, /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv (all with an Ev operand).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10295
10296
/** Opcode 0xf8 - clc: clear the carry flag in EFLAGS. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10308
10309
/** Opcode 0xf9 - stc: set the carry flag in EFLAGS. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10321
10322
/** Opcode 0xfa - cli: deferred to the C implementation (privilege checks). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
10330
10331
/** Opcode 0xfb - sti: deferred to the C implementation (privilege checks). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10338
10339
/** Opcode 0xfc - cld: clear the direction flag in EFLAGS. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10351
10352
/** Opcode 0xfd - std: set the direction flag in EFLAGS. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10364
10365
/**
 * Opcode 0xfe - group 4: /0 inc Eb, /1 dec Eb; all other ModRM.reg values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
10383
10384
10385/**
10386 * Opcode 0xff /2.
10387 * @param bRm The RM byte.
10388 */
10389FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
10390{
10391 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
10392 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10393
10394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10395 {
10396 /* The new RIP is taken from a register. */
10397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10398 switch (pVCpu->iem.s.enmEffOpSize)
10399 {
10400 case IEMMODE_16BIT:
10401 IEM_MC_BEGIN(1, 0);
10402 IEM_MC_ARG(uint16_t, u16Target, 0);
10403 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10404 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
10405 IEM_MC_END()
10406 return VINF_SUCCESS;
10407
10408 case IEMMODE_32BIT:
10409 IEM_MC_BEGIN(1, 0);
10410 IEM_MC_ARG(uint32_t, u32Target, 0);
10411 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10412 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
10413 IEM_MC_END()
10414 return VINF_SUCCESS;
10415
10416 case IEMMODE_64BIT:
10417 IEM_MC_BEGIN(1, 0);
10418 IEM_MC_ARG(uint64_t, u64Target, 0);
10419 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10420 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
10421 IEM_MC_END()
10422 return VINF_SUCCESS;
10423
10424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10425 }
10426 }
10427 else
10428 {
10429 /* The new RIP is taken from a register. */
10430 switch (pVCpu->iem.s.enmEffOpSize)
10431 {
10432 case IEMMODE_16BIT:
10433 IEM_MC_BEGIN(1, 1);
10434 IEM_MC_ARG(uint16_t, u16Target, 0);
10435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10438 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10439 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
10440 IEM_MC_END()
10441 return VINF_SUCCESS;
10442
10443 case IEMMODE_32BIT:
10444 IEM_MC_BEGIN(1, 1);
10445 IEM_MC_ARG(uint32_t, u32Target, 0);
10446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10449 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10450 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
10451 IEM_MC_END()
10452 return VINF_SUCCESS;
10453
10454 case IEMMODE_64BIT:
10455 IEM_MC_BEGIN(1, 1);
10456 IEM_MC_ARG(uint64_t, u64Target, 0);
10457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10460 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10461 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
10462 IEM_MC_END()
10463 return VINF_SUCCESS;
10464
10465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10466 }
10467 }
10468}
10469
/** C-implementation function type for far branches (used with
 *  iemCImpl_callf and iemCImpl_FarJmp below). */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
10471
10472FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
10473{
10474 /* Registers? How?? */
10475 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
10476 { /* likely */ }
10477 else
10478 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
10479
10480 /* Far pointer loaded from memory. */
10481 switch (pVCpu->iem.s.enmEffOpSize)
10482 {
10483 case IEMMODE_16BIT:
10484 IEM_MC_BEGIN(3, 1);
10485 IEM_MC_ARG(uint16_t, u16Sel, 0);
10486 IEM_MC_ARG(uint16_t, offSeg, 1);
10487 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10491 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10492 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
10493 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10494 IEM_MC_END();
10495 return VINF_SUCCESS;
10496
10497 case IEMMODE_64BIT:
10498 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
10499 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
10500 * and call far qword [rsp] encodings. */
10501 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
10502 {
10503 IEM_MC_BEGIN(3, 1);
10504 IEM_MC_ARG(uint16_t, u16Sel, 0);
10505 IEM_MC_ARG(uint64_t, offSeg, 1);
10506 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10510 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10511 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
10512 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10513 IEM_MC_END();
10514 return VINF_SUCCESS;
10515 }
10516 /* AMD falls thru. */
10517 /* fall thru */
10518
10519 case IEMMODE_32BIT:
10520 IEM_MC_BEGIN(3, 1);
10521 IEM_MC_ARG(uint16_t, u16Sel, 0);
10522 IEM_MC_ARG(uint32_t, offSeg, 1);
10523 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
10524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10527 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10528 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
10529 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10530 IEM_MC_END();
10531 return VINF_SUCCESS;
10532
10533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10534 }
10535}
10536
10537
10538/**
10539 * Opcode 0xff /3.
10540 * @param bRm The RM byte.
10541 */
10542FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
10543{
10544 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
10545 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
10546}
10547
10548
10549/**
10550 * Opcode 0xff /4.
10551 * @param bRm The RM byte.
10552 */
10553FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
10554{
10555 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
10556 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10557
10558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10559 {
10560 /* The new RIP is taken from a register. */
10561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10562 switch (pVCpu->iem.s.enmEffOpSize)
10563 {
10564 case IEMMODE_16BIT:
10565 IEM_MC_BEGIN(0, 1);
10566 IEM_MC_LOCAL(uint16_t, u16Target);
10567 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10568 IEM_MC_SET_RIP_U16(u16Target);
10569 IEM_MC_END()
10570 return VINF_SUCCESS;
10571
10572 case IEMMODE_32BIT:
10573 IEM_MC_BEGIN(0, 1);
10574 IEM_MC_LOCAL(uint32_t, u32Target);
10575 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10576 IEM_MC_SET_RIP_U32(u32Target);
10577 IEM_MC_END()
10578 return VINF_SUCCESS;
10579
10580 case IEMMODE_64BIT:
10581 IEM_MC_BEGIN(0, 1);
10582 IEM_MC_LOCAL(uint64_t, u64Target);
10583 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10584 IEM_MC_SET_RIP_U64(u64Target);
10585 IEM_MC_END()
10586 return VINF_SUCCESS;
10587
10588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10589 }
10590 }
10591 else
10592 {
10593 /* The new RIP is taken from a memory location. */
10594 switch (pVCpu->iem.s.enmEffOpSize)
10595 {
10596 case IEMMODE_16BIT:
10597 IEM_MC_BEGIN(0, 2);
10598 IEM_MC_LOCAL(uint16_t, u16Target);
10599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10602 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10603 IEM_MC_SET_RIP_U16(u16Target);
10604 IEM_MC_END()
10605 return VINF_SUCCESS;
10606
10607 case IEMMODE_32BIT:
10608 IEM_MC_BEGIN(0, 2);
10609 IEM_MC_LOCAL(uint32_t, u32Target);
10610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10613 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10614 IEM_MC_SET_RIP_U32(u32Target);
10615 IEM_MC_END()
10616 return VINF_SUCCESS;
10617
10618 case IEMMODE_64BIT:
10619 IEM_MC_BEGIN(0, 2);
10620 IEM_MC_LOCAL(uint64_t, u64Target);
10621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10624 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10625 IEM_MC_SET_RIP_U64(u64Target);
10626 IEM_MC_END()
10627 return VINF_SUCCESS;
10628
10629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10630 }
10631 }
10632}
10633
10634
10635/**
10636 * Opcode 0xff /5.
10637 * @param bRm The RM byte.
10638 */
10639FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
10640{
10641 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
10642 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
10643}
10644
10645
10646/**
10647 * Opcode 0xff /6.
10648 * @param bRm The RM byte.
10649 */
10650FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
10651{
10652 IEMOP_MNEMONIC(push_Ev, "push Ev");
10653
10654 /* Registers are handled by a common worker. */
10655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10656 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10657
10658 /* Memory we do here. */
10659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10660 switch (pVCpu->iem.s.enmEffOpSize)
10661 {
10662 case IEMMODE_16BIT:
10663 IEM_MC_BEGIN(0, 2);
10664 IEM_MC_LOCAL(uint16_t, u16Src);
10665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10668 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10669 IEM_MC_PUSH_U16(u16Src);
10670 IEM_MC_ADVANCE_RIP();
10671 IEM_MC_END();
10672 return VINF_SUCCESS;
10673
10674 case IEMMODE_32BIT:
10675 IEM_MC_BEGIN(0, 2);
10676 IEM_MC_LOCAL(uint32_t, u32Src);
10677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10680 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10681 IEM_MC_PUSH_U32(u32Src);
10682 IEM_MC_ADVANCE_RIP();
10683 IEM_MC_END();
10684 return VINF_SUCCESS;
10685
10686 case IEMMODE_64BIT:
10687 IEM_MC_BEGIN(0, 2);
10688 IEM_MC_LOCAL(uint64_t, u64Src);
10689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10692 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10693 IEM_MC_PUSH_U64(u64Src);
10694 IEM_MC_ADVANCE_RIP();
10695 IEM_MC_END();
10696 return VINF_SUCCESS;
10697
10698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10699 }
10700}
10701
10702
/**
 * Opcode 0xff - group 5: /0 inc, /1 dec, /2 calln, /3 callf, /4 jmpn,
 * /5 jmpf, /6 push (all Ev/Ep); /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
10731
10732
10733
/**
 * The one-byte opcode dispatch table, indexed by the first opcode byte.
 *
 * Each entry is the decoder/emulation worker for that opcode (including
 * prefix bytes and the 0x0f two-byte escape).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
10801
10802
10803/** @} */
10804
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette