VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66135

Last change on this file since 66135 was 66135, checked in by vboxsync, 8 years ago

IEM: Implemented AAA.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 380.9 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66135 2017-03-16 15:53:06Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_size
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
67FNIEMOP_DEF(iemOp_add_Eb_Gb)
68{
69 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
70 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
71}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
83FNIEMOP_DEF(iemOp_add_Ev_Gv)
84{
85 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
86 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
87}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
96FNIEMOP_DEF(iemOp_add_Gb_Eb)
97{
98 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
99 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
100}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
109FNIEMOP_DEF(iemOp_add_Gv_Ev)
110{
111 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
112 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
113}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
122FNIEMOP_DEF(iemOp_add_Al_Ib)
123{
124 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
125 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
126}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
138FNIEMOP_DEF(iemOp_add_eAX_Iz)
139{
140 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
141 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
142}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
149FNIEMOP_DEF(iemOp_push_ES)
150{
151 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
152 IEMOP_HLP_NO_64BIT();
153 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
154}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
161FNIEMOP_DEF(iemOp_pop_ES)
162{
163 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
164 IEMOP_HLP_NO_64BIT();
165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
167}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
181FNIEMOP_DEF(iemOp_or_Eb_Gb)
182{
183 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
185 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
186}
187
188
189/*
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
202FNIEMOP_DEF(iemOp_or_Ev_Gv)
203{
204 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
206 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
207}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
218FNIEMOP_DEF(iemOp_or_Gb_Eb)
219{
220 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
221 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
222 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
223}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
234FNIEMOP_DEF(iemOp_or_Gv_Ev)
235{
236 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
237 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
238 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
239}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
250FNIEMOP_DEF(iemOp_or_Al_Ib)
251{
252 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
254 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
255}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
272FNIEMOP_DEF(iemOp_or_eAX_Iz)
273{
274 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
276 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
277}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
284FNIEMOP_DEF(iemOp_push_CS)
285{
286 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
287 IEMOP_HLP_NO_64BIT();
288 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
289}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
345FNIEMOP_DEF(iemOp_adc_Eb_Gb)
346{
347 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
348 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
349}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
363FNIEMOP_DEF(iemOp_adc_Ev_Gv)
364{
365 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
366 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
367}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
377FNIEMOP_DEF(iemOp_adc_Gb_Eb)
378{
379 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
380 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
381}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
391FNIEMOP_DEF(iemOp_adc_Gv_Ev)
392{
393 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
394 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
395}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
405FNIEMOP_DEF(iemOp_adc_Al_Ib)
406{
407 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
408 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
409}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
419FNIEMOP_DEF(iemOp_adc_eAX_Iz)
420{
421 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
422 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
423}
424
425
426/**
427 * @opcode 0x16
428 */
429FNIEMOP_DEF(iemOp_push_SS)
430{
431 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
432 IEMOP_HLP_NO_64BIT();
433 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
434}
435
436
437/**
438 * @opcode 0x17
439 * @opgroup og_gen_arith_bin
440 * @opfltest cf
441 * @opflmodify cf,pf,af,zf,sf,of
442 */
443FNIEMOP_DEF(iemOp_pop_SS)
444{
445 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
447 IEMOP_HLP_NO_64BIT();
448 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
449}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
458FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
459{
460 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
461 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
462}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
471FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
472{
473 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
474 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
475}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
484FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
485{
486 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
487 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
488}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
497FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
498{
499 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
501}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
510FNIEMOP_DEF(iemOp_sbb_Al_Ib)
511{
512 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
513 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
514}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
523FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
524{
525 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
526 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
527}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
534FNIEMOP_DEF(iemOp_push_DS)
535{
536 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
537 IEMOP_HLP_NO_64BIT();
538 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
539}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
546FNIEMOP_DEF(iemOp_pop_DS)
547{
548 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
550 IEMOP_HLP_NO_64BIT();
551 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
552}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
562FNIEMOP_DEF(iemOp_and_Eb_Gb)
563{
564 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
565 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
566 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
567}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
577FNIEMOP_DEF(iemOp_and_Ev_Gv)
578{
579 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
582}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
592FNIEMOP_DEF(iemOp_and_Gb_Eb)
593{
594 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
596 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
597}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
607FNIEMOP_DEF(iemOp_and_Gv_Ev)
608{
609 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
610 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
611 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
612}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
637FNIEMOP_DEF(iemOp_and_eAX_Iz)
638{
639 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
640 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
641 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
642}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
654FNIEMOP_DEF(iemOp_seg_ES)
655{
656 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
657 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
658 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
659
660 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
661 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
662}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
671FNIEMOP_DEF(iemOp_daa)
672{
673 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
674 IEMOP_HLP_NO_64BIT();
675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
676 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
677 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
678}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
686FNIEMOP_DEF(iemOp_sub_Eb_Gb)
687{
688 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
689 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
690}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
698FNIEMOP_DEF(iemOp_sub_Ev_Gv)
699{
700 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
701 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
702}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
710FNIEMOP_DEF(iemOp_sub_Gb_Eb)
711{
712 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
713 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
714}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
722FNIEMOP_DEF(iemOp_sub_Gv_Ev)
723{
724 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
726}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
734FNIEMOP_DEF(iemOp_sub_Al_Ib)
735{
736 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
738}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
746FNIEMOP_DEF(iemOp_sub_eAX_Iz)
747{
748 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
749 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
750}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
762FNIEMOP_DEF(iemOp_seg_CS)
763{
764 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
765 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
766 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
767
768 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
769 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
770}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
779FNIEMOP_DEF(iemOp_das)
780{
781 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
782 IEMOP_HLP_NO_64BIT();
783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
784 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
785 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
786}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
796FNIEMOP_DEF(iemOp_xor_Eb_Gb)
797{
798 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
800 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
801}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
811FNIEMOP_DEF(iemOp_xor_Ev_Gv)
812{
813 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
814 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
815 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
816}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
826FNIEMOP_DEF(iemOp_xor_Gb_Eb)
827{
828 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
829 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
830 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
831}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
841FNIEMOP_DEF(iemOp_xor_Gv_Ev)
842{
843 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
844 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
845 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
846}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
856FNIEMOP_DEF(iemOp_xor_Al_Ib)
857{
858 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
859 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
861}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
888FNIEMOP_DEF(iemOp_seg_SS)
889{
890 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
892 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
893
894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
896}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
908 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
909 * @optest efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
910 * @optest efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
911 * @optest efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
912 * @optest efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
913 * @optest efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
914 * @optest efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
915 * @optest efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
916 * @optest efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
917 * @optest efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
918 * @optest efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
919 * @optest efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
920 * @optest efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
921 */
922FNIEMOP_DEF(iemOp_aaa)
923{
924 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
925 IEMOP_HLP_NO_64BIT();
926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
928
929 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
930}
931
932
933/**
934 * @opcode 0x38
935 */
936FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
937{
938 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
939 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
940}
941
942
943/**
944 * @opcode 0x39
945 */
946FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
947{
948 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
949 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
950}
951
952
953/**
954 * @opcode 0x3a
955 */
956FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
957{
958 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
959 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
960}
961
962
963/**
964 * @opcode 0x3b
965 */
966FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
967{
968 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
969 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
970}
971
972
973/**
974 * @opcode 0x3c
975 */
976FNIEMOP_DEF(iemOp_cmp_Al_Ib)
977{
978 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
979 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
980}
981
982
983/**
984 * @opcode 0x3d
985 */
986FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
987{
988 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
989 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
990}
991
992
993/**
994 * @opcode 0x3e
995 */
996FNIEMOP_DEF(iemOp_seg_DS)
997{
998 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
999 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1000 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1001
1002 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1003 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1004}
1005
1006
1007/**
1008 * @opcode 0x3f
1009 */
1010FNIEMOP_STUB(iemOp_aas);
1011
1012/**
1013 * Common 'inc/dec/not/neg register' helper.
1014 */
1015FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
1016{
1017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1018 switch (pVCpu->iem.s.enmEffOpSize)
1019 {
1020 case IEMMODE_16BIT:
1021 IEM_MC_BEGIN(2, 0);
1022 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1023 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1024 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1025 IEM_MC_REF_EFLAGS(pEFlags);
1026 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
1027 IEM_MC_ADVANCE_RIP();
1028 IEM_MC_END();
1029 return VINF_SUCCESS;
1030
1031 case IEMMODE_32BIT:
1032 IEM_MC_BEGIN(2, 0);
1033 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
1034 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1035 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1036 IEM_MC_REF_EFLAGS(pEFlags);
1037 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
1038 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
1039 IEM_MC_ADVANCE_RIP();
1040 IEM_MC_END();
1041 return VINF_SUCCESS;
1042
1043 case IEMMODE_64BIT:
1044 IEM_MC_BEGIN(2, 0);
1045 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1046 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1047 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1048 IEM_MC_REF_EFLAGS(pEFlags);
1049 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
1050 IEM_MC_ADVANCE_RIP();
1051 IEM_MC_END();
1052 return VINF_SUCCESS;
1053 }
1054 return VINF_SUCCESS;
1055}
1056
1057
1058/**
1059 * @opcode 0x40
1060 */
1061FNIEMOP_DEF(iemOp_inc_eAX)
1062{
1063 /*
1064 * This is a REX prefix in 64-bit mode.
1065 */
1066 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1067 {
1068 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1069 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1070
1071 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1072 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1073 }
1074
1075 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1076 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1077}
1078
1079
1080/**
1081 * @opcode 0x41
1082 */
1083FNIEMOP_DEF(iemOp_inc_eCX)
1084{
1085 /*
1086 * This is a REX prefix in 64-bit mode.
1087 */
1088 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1089 {
1090 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1091 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1092 pVCpu->iem.s.uRexB = 1 << 3;
1093
1094 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1095 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1096 }
1097
1098 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1099 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1100}
1101
1102
1103/**
1104 * @opcode 0x42
1105 */
1106FNIEMOP_DEF(iemOp_inc_eDX)
1107{
1108 /*
1109 * This is a REX prefix in 64-bit mode.
1110 */
1111 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1112 {
1113 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1114 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1115 pVCpu->iem.s.uRexIndex = 1 << 3;
1116
1117 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1118 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1119 }
1120
1121 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1122 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1123}
1124
1125
1126
1127/**
1128 * @opcode 0x43
1129 */
1130FNIEMOP_DEF(iemOp_inc_eBX)
1131{
1132 /*
1133 * This is a REX prefix in 64-bit mode.
1134 */
1135 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1136 {
1137 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1138 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1139 pVCpu->iem.s.uRexB = 1 << 3;
1140 pVCpu->iem.s.uRexIndex = 1 << 3;
1141
1142 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1143 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1144 }
1145
1146 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1147 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1148}
1149
1150
1151/**
1152 * @opcode 0x44
1153 */
1154FNIEMOP_DEF(iemOp_inc_eSP)
1155{
1156 /*
1157 * This is a REX prefix in 64-bit mode.
1158 */
1159 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1160 {
1161 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1162 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1163 pVCpu->iem.s.uRexReg = 1 << 3;
1164
1165 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1166 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1167 }
1168
1169 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1170 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1171}
1172
1173
1174/**
1175 * @opcode 0x45
1176 */
1177FNIEMOP_DEF(iemOp_inc_eBP)
1178{
1179 /*
1180 * This is a REX prefix in 64-bit mode.
1181 */
1182 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1183 {
1184 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1185 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1186 pVCpu->iem.s.uRexReg = 1 << 3;
1187 pVCpu->iem.s.uRexB = 1 << 3;
1188
1189 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1190 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1191 }
1192
1193 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1194 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1195}
1196
1197
1198/**
1199 * @opcode 0x46
1200 */
1201FNIEMOP_DEF(iemOp_inc_eSI)
1202{
1203 /*
1204 * This is a REX prefix in 64-bit mode.
1205 */
1206 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1207 {
1208 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1209 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1210 pVCpu->iem.s.uRexReg = 1 << 3;
1211 pVCpu->iem.s.uRexIndex = 1 << 3;
1212
1213 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1214 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1215 }
1216
1217 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1218 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1219}
1220
1221
1222/**
1223 * @opcode 0x47
1224 */
1225FNIEMOP_DEF(iemOp_inc_eDI)
1226{
1227 /*
1228 * This is a REX prefix in 64-bit mode.
1229 */
1230 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1231 {
1232 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1233 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1234 pVCpu->iem.s.uRexReg = 1 << 3;
1235 pVCpu->iem.s.uRexB = 1 << 3;
1236 pVCpu->iem.s.uRexIndex = 1 << 3;
1237
1238 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1239 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1240 }
1241
1242 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1243 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1244}
1245
1246
1247/**
1248 * @opcode 0x48
1249 */
1250FNIEMOP_DEF(iemOp_dec_eAX)
1251{
1252 /*
1253 * This is a REX prefix in 64-bit mode.
1254 */
1255 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1256 {
1257 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1258 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1259 iemRecalEffOpSize(pVCpu);
1260
1261 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1262 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1263 }
1264
1265 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1266 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1267}
1268
1269
1270/**
1271 * @opcode 0x49
1272 */
1273FNIEMOP_DEF(iemOp_dec_eCX)
1274{
1275 /*
1276 * This is a REX prefix in 64-bit mode.
1277 */
1278 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1279 {
1280 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1281 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1282 pVCpu->iem.s.uRexB = 1 << 3;
1283 iemRecalEffOpSize(pVCpu);
1284
1285 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1286 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1287 }
1288
1289 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1290 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1291}
1292
1293
1294/**
1295 * @opcode 0x4a
1296 */
1297FNIEMOP_DEF(iemOp_dec_eDX)
1298{
1299 /*
1300 * This is a REX prefix in 64-bit mode.
1301 */
1302 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1303 {
1304 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1305 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1306 pVCpu->iem.s.uRexIndex = 1 << 3;
1307 iemRecalEffOpSize(pVCpu);
1308
1309 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1310 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1311 }
1312
1313 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1314 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1315}
1316
1317
1318/**
1319 * @opcode 0x4b
1320 */
1321FNIEMOP_DEF(iemOp_dec_eBX)
1322{
1323 /*
1324 * This is a REX prefix in 64-bit mode.
1325 */
1326 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1327 {
1328 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1329 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1330 pVCpu->iem.s.uRexB = 1 << 3;
1331 pVCpu->iem.s.uRexIndex = 1 << 3;
1332 iemRecalEffOpSize(pVCpu);
1333
1334 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1335 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1336 }
1337
1338 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1339 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1340}
1341
1342
1343/**
1344 * @opcode 0x4c
1345 */
1346FNIEMOP_DEF(iemOp_dec_eSP)
1347{
1348 /*
1349 * This is a REX prefix in 64-bit mode.
1350 */
1351 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1352 {
1353 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1354 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1355 pVCpu->iem.s.uRexReg = 1 << 3;
1356 iemRecalEffOpSize(pVCpu);
1357
1358 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1359 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1360 }
1361
1362 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1363 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1364}
1365
1366
1367/**
1368 * @opcode 0x4d
1369 */
1370FNIEMOP_DEF(iemOp_dec_eBP)
1371{
1372 /*
1373 * This is a REX prefix in 64-bit mode.
1374 */
1375 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1376 {
1377 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1378 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1379 pVCpu->iem.s.uRexReg = 1 << 3;
1380 pVCpu->iem.s.uRexB = 1 << 3;
1381 iemRecalEffOpSize(pVCpu);
1382
1383 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1384 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1385 }
1386
1387 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1388 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1389}
1390
1391
1392/**
1393 * @opcode 0x4e
1394 */
1395FNIEMOP_DEF(iemOp_dec_eSI)
1396{
1397 /*
1398 * This is a REX prefix in 64-bit mode.
1399 */
1400 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1401 {
1402 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1403 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1404 pVCpu->iem.s.uRexReg = 1 << 3;
1405 pVCpu->iem.s.uRexIndex = 1 << 3;
1406 iemRecalEffOpSize(pVCpu);
1407
1408 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1409 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1410 }
1411
1412 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1413 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1414}
1415
1416
1417/**
1418 * @opcode 0x4f
1419 */
1420FNIEMOP_DEF(iemOp_dec_eDI)
1421{
1422 /*
1423 * This is a REX prefix in 64-bit mode.
1424 */
1425 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1426 {
1427 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1428 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1429 pVCpu->iem.s.uRexReg = 1 << 3;
1430 pVCpu->iem.s.uRexB = 1 << 3;
1431 pVCpu->iem.s.uRexIndex = 1 << 3;
1432 iemRecalEffOpSize(pVCpu);
1433
1434 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1435 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1436 }
1437
1438 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1439 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1440}
1441
1442
1443/**
1444 * Common 'push register' helper.
1445 */
1446FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1447{
1448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1449 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1450 {
1451 iReg |= pVCpu->iem.s.uRexB;
1452 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1453 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1454 }
1455
1456 switch (pVCpu->iem.s.enmEffOpSize)
1457 {
1458 case IEMMODE_16BIT:
1459 IEM_MC_BEGIN(0, 1);
1460 IEM_MC_LOCAL(uint16_t, u16Value);
1461 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1462 IEM_MC_PUSH_U16(u16Value);
1463 IEM_MC_ADVANCE_RIP();
1464 IEM_MC_END();
1465 break;
1466
1467 case IEMMODE_32BIT:
1468 IEM_MC_BEGIN(0, 1);
1469 IEM_MC_LOCAL(uint32_t, u32Value);
1470 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1471 IEM_MC_PUSH_U32(u32Value);
1472 IEM_MC_ADVANCE_RIP();
1473 IEM_MC_END();
1474 break;
1475
1476 case IEMMODE_64BIT:
1477 IEM_MC_BEGIN(0, 1);
1478 IEM_MC_LOCAL(uint64_t, u64Value);
1479 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1480 IEM_MC_PUSH_U64(u64Value);
1481 IEM_MC_ADVANCE_RIP();
1482 IEM_MC_END();
1483 break;
1484 }
1485
1486 return VINF_SUCCESS;
1487}
1488
1489
1490/**
1491 * @opcode 0x50
1492 */
1493FNIEMOP_DEF(iemOp_push_eAX)
1494{
1495 IEMOP_MNEMONIC(push_rAX, "push rAX");
1496 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1497}
1498
1499
1500/**
1501 * @opcode 0x51
1502 */
1503FNIEMOP_DEF(iemOp_push_eCX)
1504{
1505 IEMOP_MNEMONIC(push_rCX, "push rCX");
1506 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1507}
1508
1509
1510/**
1511 * @opcode 0x52
1512 */
1513FNIEMOP_DEF(iemOp_push_eDX)
1514{
1515 IEMOP_MNEMONIC(push_rDX, "push rDX");
1516 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1517}
1518
1519
1520/**
1521 * @opcode 0x53
1522 */
1523FNIEMOP_DEF(iemOp_push_eBX)
1524{
1525 IEMOP_MNEMONIC(push_rBX, "push rBX");
1526 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1527}
1528
1529
1530/**
1531 * @opcode 0x54
1532 */
1533FNIEMOP_DEF(iemOp_push_eSP)
1534{
1535 IEMOP_MNEMONIC(push_rSP, "push rSP");
1536 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1537 {
1538 IEM_MC_BEGIN(0, 1);
1539 IEM_MC_LOCAL(uint16_t, u16Value);
1540 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1541 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1542 IEM_MC_PUSH_U16(u16Value);
1543 IEM_MC_ADVANCE_RIP();
1544 IEM_MC_END();
1545 }
1546 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1547}
1548
1549
1550/**
1551 * @opcode 0x55
1552 */
1553FNIEMOP_DEF(iemOp_push_eBP)
1554{
1555 IEMOP_MNEMONIC(push_rBP, "push rBP");
1556 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1557}
1558
1559
1560/**
1561 * @opcode 0x56
1562 */
1563FNIEMOP_DEF(iemOp_push_eSI)
1564{
1565 IEMOP_MNEMONIC(push_rSI, "push rSI");
1566 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1567}
1568
1569
1570/**
1571 * @opcode 0x57
1572 */
1573FNIEMOP_DEF(iemOp_push_eDI)
1574{
1575 IEMOP_MNEMONIC(push_rDI, "push rDI");
1576 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1577}
1578
1579
1580/**
1581 * Common 'pop register' helper.
1582 */
1583FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1584{
1585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1586 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1587 {
1588 iReg |= pVCpu->iem.s.uRexB;
1589 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1590 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1591 }
1592
1593 switch (pVCpu->iem.s.enmEffOpSize)
1594 {
1595 case IEMMODE_16BIT:
1596 IEM_MC_BEGIN(0, 1);
1597 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1598 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1599 IEM_MC_POP_U16(pu16Dst);
1600 IEM_MC_ADVANCE_RIP();
1601 IEM_MC_END();
1602 break;
1603
1604 case IEMMODE_32BIT:
1605 IEM_MC_BEGIN(0, 1);
1606 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1607 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1608 IEM_MC_POP_U32(pu32Dst);
1609 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1610 IEM_MC_ADVANCE_RIP();
1611 IEM_MC_END();
1612 break;
1613
1614 case IEMMODE_64BIT:
1615 IEM_MC_BEGIN(0, 1);
1616 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1617 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1618 IEM_MC_POP_U64(pu64Dst);
1619 IEM_MC_ADVANCE_RIP();
1620 IEM_MC_END();
1621 break;
1622 }
1623
1624 return VINF_SUCCESS;
1625}
1626
1627
1628/**
1629 * @opcode 0x58
1630 */
1631FNIEMOP_DEF(iemOp_pop_eAX)
1632{
1633 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1634 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1635}
1636
1637
1638/**
1639 * @opcode 0x59
1640 */
1641FNIEMOP_DEF(iemOp_pop_eCX)
1642{
1643 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1644 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1645}
1646
1647
1648/**
1649 * @opcode 0x5a
1650 */
1651FNIEMOP_DEF(iemOp_pop_eDX)
1652{
1653 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1654 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1655}
1656
1657
1658/**
1659 * @opcode 0x5b
1660 */
1661FNIEMOP_DEF(iemOp_pop_eBX)
1662{
1663 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1664 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1665}
1666
1667
1668/**
1669 * @opcode 0x5c
1670 */
1671FNIEMOP_DEF(iemOp_pop_eSP)
1672{
1673 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1674 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1675 {
1676 if (pVCpu->iem.s.uRexB)
1677 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1678 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1679 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1680 }
1681
1682 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1683 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1684 /** @todo add testcase for this instruction. */
1685 switch (pVCpu->iem.s.enmEffOpSize)
1686 {
1687 case IEMMODE_16BIT:
1688 IEM_MC_BEGIN(0, 1);
1689 IEM_MC_LOCAL(uint16_t, u16Dst);
1690 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
1691 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
1692 IEM_MC_ADVANCE_RIP();
1693 IEM_MC_END();
1694 break;
1695
1696 case IEMMODE_32BIT:
1697 IEM_MC_BEGIN(0, 1);
1698 IEM_MC_LOCAL(uint32_t, u32Dst);
1699 IEM_MC_POP_U32(&u32Dst);
1700 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
1701 IEM_MC_ADVANCE_RIP();
1702 IEM_MC_END();
1703 break;
1704
1705 case IEMMODE_64BIT:
1706 IEM_MC_BEGIN(0, 1);
1707 IEM_MC_LOCAL(uint64_t, u64Dst);
1708 IEM_MC_POP_U64(&u64Dst);
1709 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
1710 IEM_MC_ADVANCE_RIP();
1711 IEM_MC_END();
1712 break;
1713 }
1714
1715 return VINF_SUCCESS;
1716}
1717
1718
1719/**
1720 * @opcode 0x5d
1721 */
1722FNIEMOP_DEF(iemOp_pop_eBP)
1723{
1724 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
1725 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
1726}
1727
1728
1729/**
1730 * @opcode 0x5e
1731 */
1732FNIEMOP_DEF(iemOp_pop_eSI)
1733{
1734 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
1735 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
1736}
1737
1738
1739/**
1740 * @opcode 0x5f
1741 */
1742FNIEMOP_DEF(iemOp_pop_eDI)
1743{
1744 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
1745 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
1746}
1747
1748
1749/**
1750 * @opcode 0x60
1751 */
1752FNIEMOP_DEF(iemOp_pusha)
1753{
1754 IEMOP_MNEMONIC(pusha, "pusha");
1755 IEMOP_HLP_MIN_186();
1756 IEMOP_HLP_NO_64BIT();
1757 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1758 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1759 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1760 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1761}
1762
1763
1764/**
1765 * @opcode 0x61
1766 */
1767FNIEMOP_DEF(iemOp_popa__mvex)
1768{
1769 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1770 {
1771 IEMOP_MNEMONIC(popa, "popa");
1772 IEMOP_HLP_MIN_186();
1773 IEMOP_HLP_NO_64BIT();
1774 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1775 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1776 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1777 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1778 }
1779 IEMOP_MNEMONIC(mvex, "mvex");
1780 Log(("mvex prefix is not supported!\n"));
1781 return IEMOP_RAISE_INVALID_OPCODE();
1782}
1783
1784
1785/**
1786 * @opcode 0x62
1787 * @opmnemonic bound
1788 * @op1 Gv
1789 * @op2 Ma
1790 * @opmincpu 80186
1791 * @ophints harmless invalid_64
1792 */
1793FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
1794// IEMOP_HLP_MIN_186();
1795
1796
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust RPL field of a selector.  286+, protected mode only
 * (IEMOP_HLP_NO_REAL_OR_V86_MODE); in 64-bit mode 0x63 is MOVSXD instead. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read/write, work on it in place,
           then commit both the memory and the (locally shadowed) EFLAGS. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1846
1847
1848/**
1849 * @opcode 0x63
1850 *
1851 * @note This is a weird one. It works like a regular move instruction if
1852 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
1853 * @todo This definitely needs a testcase to verify the odd cases. */
1854FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
1855{
1856 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
1857
1858 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
1859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1860
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 /*
1864 * Register to register.
1865 */
1866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1867 IEM_MC_BEGIN(0, 1);
1868 IEM_MC_LOCAL(uint64_t, u64Value);
1869 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1870 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1871 IEM_MC_ADVANCE_RIP();
1872 IEM_MC_END();
1873 }
1874 else
1875 {
1876 /*
1877 * We're loading a register from memory.
1878 */
1879 IEM_MC_BEGIN(0, 2);
1880 IEM_MC_LOCAL(uint64_t, u64Value);
1881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1884 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1885 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1886 IEM_MC_ADVANCE_RIP();
1887 IEM_MC_END();
1888 }
1889 return VINF_SUCCESS;
1890}
1891
1892
1893/**
1894 * @opcode 0x64
1895 * @opmnemonic segfs
1896 * @opmincpu 80386
1897 * @opgroup og_prefixes
1898 */
1899FNIEMOP_DEF(iemOp_seg_FS)
1900{
1901 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
1902 IEMOP_HLP_MIN_386();
1903
1904 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
1905 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
1906
1907 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1908 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1909}
1910
1911
1912/**
1913 * @opcode 0x65
1914 * @opmnemonic seggs
1915 * @opmincpu 80386
1916 * @opgroup og_prefixes
1917 */
1918FNIEMOP_DEF(iemOp_seg_GS)
1919{
1920 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
1921 IEMOP_HLP_MIN_386();
1922
1923 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
1924 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
1925
1926 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1927 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1928}
1929
1930
1931/**
1932 * @opcode 0x66
1933 * @opmnemonic opsize
1934 * @openc prefix
1935 * @opmincpu 80386
1936 * @ophints harmless
1937 * @opgroup og_prefixes
1938 */
1939FNIEMOP_DEF(iemOp_op_size)
1940{
1941 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
1942 IEMOP_HLP_MIN_386();
1943
1944 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
1945 iemRecalEffOpSize(pVCpu);
1946
1947 /* For the 4 entry opcode tables, the operand prefix doesn't not count
1948 when REPZ or REPNZ are present. */
1949 if (pVCpu->iem.s.idxPrefix == 0)
1950 pVCpu->iem.s.idxPrefix = 1;
1951
1952 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1953 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1954}
1955
1956
1957/**
1958 * @opcode 0x67
1959 * @opmnemonic addrsize
1960 * @openc prefix
1961 * @opmincpu 80386
1962 * @ophints harmless
1963 * @opgroup og_prefixes
1964 */
1965FNIEMOP_DEF(iemOp_addr_size)
1966{
1967 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
1968 IEMOP_HLP_MIN_386();
1969
1970 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
1971 switch (pVCpu->iem.s.enmDefAddrMode)
1972 {
1973 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1974 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
1975 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1976 default: AssertFailed();
1977 }
1978
1979 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1980 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1981}
1982
1983
1984/**
1985 * @opcode 0x68
1986 */
1987FNIEMOP_DEF(iemOp_push_Iz)
1988{
1989 IEMOP_MNEMONIC(push_Iz, "push Iz");
1990 IEMOP_HLP_MIN_186();
1991 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1992 switch (pVCpu->iem.s.enmEffOpSize)
1993 {
1994 case IEMMODE_16BIT:
1995 {
1996 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 IEM_MC_BEGIN(0,0);
1999 IEM_MC_PUSH_U16(u16Imm);
2000 IEM_MC_ADVANCE_RIP();
2001 IEM_MC_END();
2002 return VINF_SUCCESS;
2003 }
2004
2005 case IEMMODE_32BIT:
2006 {
2007 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2009 IEM_MC_BEGIN(0,0);
2010 IEM_MC_PUSH_U32(u32Imm);
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 return VINF_SUCCESS;
2014 }
2015
2016 case IEMMODE_64BIT:
2017 {
2018 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 IEM_MC_BEGIN(0,0);
2021 IEM_MC_PUSH_U64(u64Imm);
2022 IEM_MC_ADVANCE_RIP();
2023 IEM_MC_END();
2024 return VINF_SUCCESS;
2025 }
2026
2027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2028 }
2029}
2030
2031
2032/**
2033 * @opcode 0x69
2034 */
2035FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2036{
2037 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2038 IEMOP_HLP_MIN_186();
2039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2041
2042 switch (pVCpu->iem.s.enmEffOpSize)
2043 {
2044 case IEMMODE_16BIT:
2045 {
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /* register operand */
2049 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2051
2052 IEM_MC_BEGIN(3, 1);
2053 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2054 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2055 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2056 IEM_MC_LOCAL(uint16_t, u16Tmp);
2057
2058 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2059 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2060 IEM_MC_REF_EFLAGS(pEFlags);
2061 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2062 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2063
2064 IEM_MC_ADVANCE_RIP();
2065 IEM_MC_END();
2066 }
2067 else
2068 {
2069 /* memory operand */
2070 IEM_MC_BEGIN(3, 2);
2071 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2072 IEM_MC_ARG(uint16_t, u16Src, 1);
2073 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2074 IEM_MC_LOCAL(uint16_t, u16Tmp);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2076
2077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2078 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2079 IEM_MC_ASSIGN(u16Src, u16Imm);
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2081 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2082 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2083 IEM_MC_REF_EFLAGS(pEFlags);
2084 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2085 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2086
2087 IEM_MC_ADVANCE_RIP();
2088 IEM_MC_END();
2089 }
2090 return VINF_SUCCESS;
2091 }
2092
2093 case IEMMODE_32BIT:
2094 {
2095 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2096 {
2097 /* register operand */
2098 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2100
2101 IEM_MC_BEGIN(3, 1);
2102 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2103 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2104 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2105 IEM_MC_LOCAL(uint32_t, u32Tmp);
2106
2107 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2108 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2109 IEM_MC_REF_EFLAGS(pEFlags);
2110 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2111 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2112
2113 IEM_MC_ADVANCE_RIP();
2114 IEM_MC_END();
2115 }
2116 else
2117 {
2118 /* memory operand */
2119 IEM_MC_BEGIN(3, 2);
2120 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2121 IEM_MC_ARG(uint32_t, u32Src, 1);
2122 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2123 IEM_MC_LOCAL(uint32_t, u32Tmp);
2124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2125
2126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2127 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2128 IEM_MC_ASSIGN(u32Src, u32Imm);
2129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2130 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2131 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2132 IEM_MC_REF_EFLAGS(pEFlags);
2133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2134 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2135
2136 IEM_MC_ADVANCE_RIP();
2137 IEM_MC_END();
2138 }
2139 return VINF_SUCCESS;
2140 }
2141
2142 case IEMMODE_64BIT:
2143 {
2144 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2145 {
2146 /* register operand */
2147 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2149
2150 IEM_MC_BEGIN(3, 1);
2151 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2152 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2153 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2154 IEM_MC_LOCAL(uint64_t, u64Tmp);
2155
2156 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2157 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2158 IEM_MC_REF_EFLAGS(pEFlags);
2159 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2160 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2161
2162 IEM_MC_ADVANCE_RIP();
2163 IEM_MC_END();
2164 }
2165 else
2166 {
2167 /* memory operand */
2168 IEM_MC_BEGIN(3, 2);
2169 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2170 IEM_MC_ARG(uint64_t, u64Src, 1);
2171 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2172 IEM_MC_LOCAL(uint64_t, u64Tmp);
2173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2174
2175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2176 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2177 IEM_MC_ASSIGN(u64Src, u64Imm);
2178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2179 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2180 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2181 IEM_MC_REF_EFLAGS(pEFlags);
2182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2183 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2184
2185 IEM_MC_ADVANCE_RIP();
2186 IEM_MC_END();
2187 }
2188 return VINF_SUCCESS;
2189 }
2190 }
2191 AssertFailedReturn(VERR_IEM_IPE_9);
2192}
2193
2194
2195/**
2196 * @opcode 0x6a
2197 */
2198FNIEMOP_DEF(iemOp_push_Ib)
2199{
2200 IEMOP_MNEMONIC(push_Ib, "push Ib");
2201 IEMOP_HLP_MIN_186();
2202 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2204 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2205
2206 IEM_MC_BEGIN(0,0);
2207 switch (pVCpu->iem.s.enmEffOpSize)
2208 {
2209 case IEMMODE_16BIT:
2210 IEM_MC_PUSH_U16(i8Imm);
2211 break;
2212 case IEMMODE_32BIT:
2213 IEM_MC_PUSH_U32(i8Imm);
2214 break;
2215 case IEMMODE_64BIT:
2216 IEM_MC_PUSH_U64(i8Imm);
2217 break;
2218 }
2219 IEM_MC_ADVANCE_RIP();
2220 IEM_MC_END();
2221 return VINF_SUCCESS;
2222}
2223
2224
2225/**
2226 * @opcode 0x6b
2227 */
2228FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2229{
2230 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2231 IEMOP_HLP_MIN_186();
2232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2233 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2234
2235 switch (pVCpu->iem.s.enmEffOpSize)
2236 {
2237 case IEMMODE_16BIT:
2238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2239 {
2240 /* register operand */
2241 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2243
2244 IEM_MC_BEGIN(3, 1);
2245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2246 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2248 IEM_MC_LOCAL(uint16_t, u16Tmp);
2249
2250 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2251 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2252 IEM_MC_REF_EFLAGS(pEFlags);
2253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2254 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2255
2256 IEM_MC_ADVANCE_RIP();
2257 IEM_MC_END();
2258 }
2259 else
2260 {
2261 /* memory operand */
2262 IEM_MC_BEGIN(3, 2);
2263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2264 IEM_MC_ARG(uint16_t, u16Src, 1);
2265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2266 IEM_MC_LOCAL(uint16_t, u16Tmp);
2267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2270 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2271 IEM_MC_ASSIGN(u16Src, u16Imm);
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2273 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2274 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2275 IEM_MC_REF_EFLAGS(pEFlags);
2276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2277 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2278
2279 IEM_MC_ADVANCE_RIP();
2280 IEM_MC_END();
2281 }
2282 return VINF_SUCCESS;
2283
2284 case IEMMODE_32BIT:
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /* register operand */
2288 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2290
2291 IEM_MC_BEGIN(3, 1);
2292 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2293 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2295 IEM_MC_LOCAL(uint32_t, u32Tmp);
2296
2297 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2298 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2299 IEM_MC_REF_EFLAGS(pEFlags);
2300 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2301 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2302
2303 IEM_MC_ADVANCE_RIP();
2304 IEM_MC_END();
2305 }
2306 else
2307 {
2308 /* memory operand */
2309 IEM_MC_BEGIN(3, 2);
2310 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2311 IEM_MC_ARG(uint32_t, u32Src, 1);
2312 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2313 IEM_MC_LOCAL(uint32_t, u32Tmp);
2314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2315
2316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2317 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2318 IEM_MC_ASSIGN(u32Src, u32Imm);
2319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2320 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2321 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2322 IEM_MC_REF_EFLAGS(pEFlags);
2323 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2324 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2325
2326 IEM_MC_ADVANCE_RIP();
2327 IEM_MC_END();
2328 }
2329 return VINF_SUCCESS;
2330
2331 case IEMMODE_64BIT:
2332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2333 {
2334 /* register operand */
2335 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2337
2338 IEM_MC_BEGIN(3, 1);
2339 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2340 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2341 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2342 IEM_MC_LOCAL(uint64_t, u64Tmp);
2343
2344 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2345 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2346 IEM_MC_REF_EFLAGS(pEFlags);
2347 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2348 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2349
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /* memory operand */
2356 IEM_MC_BEGIN(3, 2);
2357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2358 IEM_MC_ARG(uint64_t, u64Src, 1);
2359 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2360 IEM_MC_LOCAL(uint64_t, u64Tmp);
2361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2362
2363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2364 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2365 IEM_MC_ASSIGN(u64Src, u64Imm);
2366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2367 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2368 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2369 IEM_MC_REF_EFLAGS(pEFlags);
2370 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2371 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2372
2373 IEM_MC_ADVANCE_RIP();
2374 IEM_MC_END();
2375 }
2376 return VINF_SUCCESS;
2377 }
2378 AssertFailedReturn(VERR_IEM_IPE_8);
2379}
2380
2381
2382/**
2383 * @opcode 0x6c
2384 */
2385FNIEMOP_DEF(iemOp_insb_Yb_DX)
2386{
2387 IEMOP_HLP_MIN_186();
2388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2389 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2390 {
2391 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2392 switch (pVCpu->iem.s.enmEffAddrMode)
2393 {
2394 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2395 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2396 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2398 }
2399 }
2400 else
2401 {
2402 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2403 switch (pVCpu->iem.s.enmEffAddrMode)
2404 {
2405 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2406 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2407 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2409 }
2410 }
2411}
2412
2413
2414/**
2415 * @opcode 0x6d
2416 */
2417FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2418{
2419 IEMOP_HLP_MIN_186();
2420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2421 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2422 {
2423 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2424 switch (pVCpu->iem.s.enmEffOpSize)
2425 {
2426 case IEMMODE_16BIT:
2427 switch (pVCpu->iem.s.enmEffAddrMode)
2428 {
2429 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2430 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2431 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2433 }
2434 break;
2435 case IEMMODE_64BIT:
2436 case IEMMODE_32BIT:
2437 switch (pVCpu->iem.s.enmEffAddrMode)
2438 {
2439 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2440 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2441 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2443 }
2444 break;
2445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2446 }
2447 }
2448 else
2449 {
2450 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2451 switch (pVCpu->iem.s.enmEffOpSize)
2452 {
2453 case IEMMODE_16BIT:
2454 switch (pVCpu->iem.s.enmEffAddrMode)
2455 {
2456 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2457 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2458 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2460 }
2461 break;
2462 case IEMMODE_64BIT:
2463 case IEMMODE_32BIT:
2464 switch (pVCpu->iem.s.enmEffAddrMode)
2465 {
2466 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2467 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2468 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2470 }
2471 break;
2472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2473 }
2474 }
2475}
2476
2477
2478/**
2479 * @opcode 0x6e
2480 */
2481FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2482{
2483 IEMOP_HLP_MIN_186();
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2485 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2486 {
2487 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2488 switch (pVCpu->iem.s.enmEffAddrMode)
2489 {
2490 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2491 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2492 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2494 }
2495 }
2496 else
2497 {
2498 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2499 switch (pVCpu->iem.s.enmEffAddrMode)
2500 {
2501 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2502 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2503 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2505 }
2506 }
2507}
2508
2509
2510/**
2511 * @opcode 0x6f
2512 */
2513FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2514{
2515 IEMOP_HLP_MIN_186();
2516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2517 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2518 {
2519 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2520 switch (pVCpu->iem.s.enmEffOpSize)
2521 {
2522 case IEMMODE_16BIT:
2523 switch (pVCpu->iem.s.enmEffAddrMode)
2524 {
2525 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2526 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2527 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2529 }
2530 break;
2531 case IEMMODE_64BIT:
2532 case IEMMODE_32BIT:
2533 switch (pVCpu->iem.s.enmEffAddrMode)
2534 {
2535 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2536 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2537 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2539 }
2540 break;
2541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2542 }
2543 }
2544 else
2545 {
2546 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2547 switch (pVCpu->iem.s.enmEffOpSize)
2548 {
2549 case IEMMODE_16BIT:
2550 switch (pVCpu->iem.s.enmEffAddrMode)
2551 {
2552 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2553 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2554 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2556 }
2557 break;
2558 case IEMMODE_64BIT:
2559 case IEMMODE_32BIT:
2560 switch (pVCpu->iem.s.enmEffAddrMode)
2561 {
2562 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2563 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2564 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2566 }
2567 break;
2568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2569 }
2570 }
2571}
2572
2573
2574/**
2575 * @opcode 0x70
2576 */
2577FNIEMOP_DEF(iemOp_jo_Jb)
2578{
2579 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2580 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2582 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2583
2584 IEM_MC_BEGIN(0, 0);
2585 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2586 IEM_MC_REL_JMP_S8(i8Imm);
2587 } IEM_MC_ELSE() {
2588 IEM_MC_ADVANCE_RIP();
2589 } IEM_MC_ENDIF();
2590 IEM_MC_END();
2591 return VINF_SUCCESS;
2592}
2593
2594
2595/**
2596 * @opcode 0x71
2597 */
2598FNIEMOP_DEF(iemOp_jno_Jb)
2599{
2600 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2601 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2603 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2604
2605 IEM_MC_BEGIN(0, 0);
2606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2607 IEM_MC_ADVANCE_RIP();
2608 } IEM_MC_ELSE() {
2609 IEM_MC_REL_JMP_S8(i8Imm);
2610 } IEM_MC_ENDIF();
2611 IEM_MC_END();
2612 return VINF_SUCCESS;
2613}
2614
2615/**
2616 * @opcode 0x72
2617 */
2618FNIEMOP_DEF(iemOp_jc_Jb)
2619{
2620 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2621 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2623 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2624
2625 IEM_MC_BEGIN(0, 0);
2626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2627 IEM_MC_REL_JMP_S8(i8Imm);
2628 } IEM_MC_ELSE() {
2629 IEM_MC_ADVANCE_RIP();
2630 } IEM_MC_ENDIF();
2631 IEM_MC_END();
2632 return VINF_SUCCESS;
2633}
2634
2635
2636/**
2637 * @opcode 0x73
2638 */
2639FNIEMOP_DEF(iemOp_jnc_Jb)
2640{
2641 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2642 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2644 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2645
2646 IEM_MC_BEGIN(0, 0);
2647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2648 IEM_MC_ADVANCE_RIP();
2649 } IEM_MC_ELSE() {
2650 IEM_MC_REL_JMP_S8(i8Imm);
2651 } IEM_MC_ENDIF();
2652 IEM_MC_END();
2653 return VINF_SUCCESS;
2654}
2655
2656
2657/**
2658 * @opcode 0x74
2659 */
2660FNIEMOP_DEF(iemOp_je_Jb)
2661{
2662 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2663 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2665 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2666
2667 IEM_MC_BEGIN(0, 0);
2668 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2669 IEM_MC_REL_JMP_S8(i8Imm);
2670 } IEM_MC_ELSE() {
2671 IEM_MC_ADVANCE_RIP();
2672 } IEM_MC_ENDIF();
2673 IEM_MC_END();
2674 return VINF_SUCCESS;
2675}
2676
2677
2678/**
2679 * @opcode 0x75
2680 */
2681FNIEMOP_DEF(iemOp_jne_Jb)
2682{
2683 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2684 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2687
2688 IEM_MC_BEGIN(0, 0);
2689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2690 IEM_MC_ADVANCE_RIP();
2691 } IEM_MC_ELSE() {
2692 IEM_MC_REL_JMP_S8(i8Imm);
2693 } IEM_MC_ENDIF();
2694 IEM_MC_END();
2695 return VINF_SUCCESS;
2696}
2697
2698
2699/**
2700 * @opcode 0x76
2701 */
2702FNIEMOP_DEF(iemOp_jbe_Jb)
2703{
2704 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2705 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2708
2709 IEM_MC_BEGIN(0, 0);
2710 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2711 IEM_MC_REL_JMP_S8(i8Imm);
2712 } IEM_MC_ELSE() {
2713 IEM_MC_ADVANCE_RIP();
2714 } IEM_MC_ENDIF();
2715 IEM_MC_END();
2716 return VINF_SUCCESS;
2717}
2718
2719
2720/**
2721 * @opcode 0x77
2722 */
2723FNIEMOP_DEF(iemOp_jnbe_Jb)
2724{
2725 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2726 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2729
2730 IEM_MC_BEGIN(0, 0);
2731 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2732 IEM_MC_ADVANCE_RIP();
2733 } IEM_MC_ELSE() {
2734 IEM_MC_REL_JMP_S8(i8Imm);
2735 } IEM_MC_ENDIF();
2736 IEM_MC_END();
2737 return VINF_SUCCESS;
2738}
2739
2740
2741/**
2742 * @opcode 0x78
2743 */
2744FNIEMOP_DEF(iemOp_js_Jb)
2745{
2746 IEMOP_MNEMONIC(js_Jb, "js Jb");
2747 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2750
2751 IEM_MC_BEGIN(0, 0);
2752 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2753 IEM_MC_REL_JMP_S8(i8Imm);
2754 } IEM_MC_ELSE() {
2755 IEM_MC_ADVANCE_RIP();
2756 } IEM_MC_ENDIF();
2757 IEM_MC_END();
2758 return VINF_SUCCESS;
2759}
2760
2761
2762/**
2763 * @opcode 0x79
2764 */
2765FNIEMOP_DEF(iemOp_jns_Jb)
2766{
2767 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2768 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2771
2772 IEM_MC_BEGIN(0, 0);
2773 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2774 IEM_MC_ADVANCE_RIP();
2775 } IEM_MC_ELSE() {
2776 IEM_MC_REL_JMP_S8(i8Imm);
2777 } IEM_MC_ENDIF();
2778 IEM_MC_END();
2779 return VINF_SUCCESS;
2780}
2781
2782
2783/**
2784 * @opcode 0x7a
2785 */
2786FNIEMOP_DEF(iemOp_jp_Jb)
2787{
2788 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2789 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2792
2793 IEM_MC_BEGIN(0, 0);
2794 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2795 IEM_MC_REL_JMP_S8(i8Imm);
2796 } IEM_MC_ELSE() {
2797 IEM_MC_ADVANCE_RIP();
2798 } IEM_MC_ENDIF();
2799 IEM_MC_END();
2800 return VINF_SUCCESS;
2801}
2802
2803
2804/**
2805 * @opcode 0x7b
2806 */
2807FNIEMOP_DEF(iemOp_jnp_Jb)
2808{
2809 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
2810 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2812 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2813
2814 IEM_MC_BEGIN(0, 0);
2815 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2816 IEM_MC_ADVANCE_RIP();
2817 } IEM_MC_ELSE() {
2818 IEM_MC_REL_JMP_S8(i8Imm);
2819 } IEM_MC_ENDIF();
2820 IEM_MC_END();
2821 return VINF_SUCCESS;
2822}
2823
2824
2825/**
2826 * @opcode 0x7c
2827 */
2828FNIEMOP_DEF(iemOp_jl_Jb)
2829{
2830 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
2831 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2834
2835 IEM_MC_BEGIN(0, 0);
2836 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2837 IEM_MC_REL_JMP_S8(i8Imm);
2838 } IEM_MC_ELSE() {
2839 IEM_MC_ADVANCE_RIP();
2840 } IEM_MC_ENDIF();
2841 IEM_MC_END();
2842 return VINF_SUCCESS;
2843}
2844
2845
2846/**
2847 * @opcode 0x7d
2848 */
2849FNIEMOP_DEF(iemOp_jnl_Jb)
2850{
2851 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
2852 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2854 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2855
2856 IEM_MC_BEGIN(0, 0);
2857 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2858 IEM_MC_ADVANCE_RIP();
2859 } IEM_MC_ELSE() {
2860 IEM_MC_REL_JMP_S8(i8Imm);
2861 } IEM_MC_ENDIF();
2862 IEM_MC_END();
2863 return VINF_SUCCESS;
2864}
2865
2866
2867/**
2868 * @opcode 0x7e
2869 */
2870FNIEMOP_DEF(iemOp_jle_Jb)
2871{
2872 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
2873 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2876
2877 IEM_MC_BEGIN(0, 0);
2878 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2879 IEM_MC_REL_JMP_S8(i8Imm);
2880 } IEM_MC_ELSE() {
2881 IEM_MC_ADVANCE_RIP();
2882 } IEM_MC_ENDIF();
2883 IEM_MC_END();
2884 return VINF_SUCCESS;
2885}
2886
2887
2888/**
2889 * @opcode 0x7f
2890 */
2891FNIEMOP_DEF(iemOp_jnle_Jb)
2892{
2893 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
2894 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2897
2898 IEM_MC_BEGIN(0, 0);
2899 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2900 IEM_MC_ADVANCE_RIP();
2901 } IEM_MC_ELSE() {
2902 IEM_MC_REL_JMP_S8(i8Imm);
2903 } IEM_MC_ENDIF();
2904 IEM_MC_END();
2905 return VINF_SUCCESS;
2906}
2907
2908
2909/**
2910 * @opcode 0x80
2911 */
2912FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
2913{
2914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2915 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2916 {
2917 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
2918 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
2919 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
2920 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
2921 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
2922 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
2923 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
2924 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
2925 }
2926 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2927
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /* register target */
2931 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2933 IEM_MC_BEGIN(3, 0);
2934 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2935 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2937
2938 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2939 IEM_MC_REF_EFLAGS(pEFlags);
2940 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2941
2942 IEM_MC_ADVANCE_RIP();
2943 IEM_MC_END();
2944 }
2945 else
2946 {
2947 /* memory target */
2948 uint32_t fAccess;
2949 if (pImpl->pfnLockedU8)
2950 fAccess = IEM_ACCESS_DATA_RW;
2951 else /* CMP */
2952 fAccess = IEM_ACCESS_DATA_R;
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2955 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2957
2958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2959 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2960 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2961 if (pImpl->pfnLockedU8)
2962 IEMOP_HLP_DONE_DECODING();
2963 else
2964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2965
2966 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2967 IEM_MC_FETCH_EFLAGS(EFlags);
2968 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2970 else
2971 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
2972
2973 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
2974 IEM_MC_COMMIT_EFLAGS(EFlags);
2975 IEM_MC_ADVANCE_RIP();
2976 IEM_MC_END();
2977 }
2978 return VINF_SUCCESS;
2979}
2980
2981
2982/**
2983 * @opcode 0x81
2984 */
2985FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2986{
2987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2988 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2989 {
2990 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2991 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2992 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2993 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2994 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2995 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2996 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2997 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2998 }
2999 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3000
3001 switch (pVCpu->iem.s.enmEffOpSize)
3002 {
3003 case IEMMODE_16BIT:
3004 {
3005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3006 {
3007 /* register target */
3008 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3010 IEM_MC_BEGIN(3, 0);
3011 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3012 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3014
3015 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3016 IEM_MC_REF_EFLAGS(pEFlags);
3017 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3018
3019 IEM_MC_ADVANCE_RIP();
3020 IEM_MC_END();
3021 }
3022 else
3023 {
3024 /* memory target */
3025 uint32_t fAccess;
3026 if (pImpl->pfnLockedU16)
3027 fAccess = IEM_ACCESS_DATA_RW;
3028 else /* CMP, TEST */
3029 fAccess = IEM_ACCESS_DATA_R;
3030 IEM_MC_BEGIN(3, 2);
3031 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3032 IEM_MC_ARG(uint16_t, u16Src, 1);
3033 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3035
3036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3037 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3038 IEM_MC_ASSIGN(u16Src, u16Imm);
3039 if (pImpl->pfnLockedU16)
3040 IEMOP_HLP_DONE_DECODING();
3041 else
3042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3043 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3044 IEM_MC_FETCH_EFLAGS(EFlags);
3045 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3047 else
3048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3049
3050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3051 IEM_MC_COMMIT_EFLAGS(EFlags);
3052 IEM_MC_ADVANCE_RIP();
3053 IEM_MC_END();
3054 }
3055 break;
3056 }
3057
3058 case IEMMODE_32BIT:
3059 {
3060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3061 {
3062 /* register target */
3063 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_BEGIN(3, 0);
3066 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3067 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3068 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3069
3070 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3071 IEM_MC_REF_EFLAGS(pEFlags);
3072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3074
3075 IEM_MC_ADVANCE_RIP();
3076 IEM_MC_END();
3077 }
3078 else
3079 {
3080 /* memory target */
3081 uint32_t fAccess;
3082 if (pImpl->pfnLockedU32)
3083 fAccess = IEM_ACCESS_DATA_RW;
3084 else /* CMP, TEST */
3085 fAccess = IEM_ACCESS_DATA_R;
3086 IEM_MC_BEGIN(3, 2);
3087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3088 IEM_MC_ARG(uint32_t, u32Src, 1);
3089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3091
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3093 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3094 IEM_MC_ASSIGN(u32Src, u32Imm);
3095 if (pImpl->pfnLockedU32)
3096 IEMOP_HLP_DONE_DECODING();
3097 else
3098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3099 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3100 IEM_MC_FETCH_EFLAGS(EFlags);
3101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3103 else
3104 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3105
3106 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3107 IEM_MC_COMMIT_EFLAGS(EFlags);
3108 IEM_MC_ADVANCE_RIP();
3109 IEM_MC_END();
3110 }
3111 break;
3112 }
3113
3114 case IEMMODE_64BIT:
3115 {
3116 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3117 {
3118 /* register target */
3119 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3121 IEM_MC_BEGIN(3, 0);
3122 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3123 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3124 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3125
3126 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3127 IEM_MC_REF_EFLAGS(pEFlags);
3128 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3129
3130 IEM_MC_ADVANCE_RIP();
3131 IEM_MC_END();
3132 }
3133 else
3134 {
3135 /* memory target */
3136 uint32_t fAccess;
3137 if (pImpl->pfnLockedU64)
3138 fAccess = IEM_ACCESS_DATA_RW;
3139 else /* CMP */
3140 fAccess = IEM_ACCESS_DATA_R;
3141 IEM_MC_BEGIN(3, 2);
3142 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3143 IEM_MC_ARG(uint64_t, u64Src, 1);
3144 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3146
3147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3148 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3149 if (pImpl->pfnLockedU64)
3150 IEMOP_HLP_DONE_DECODING();
3151 else
3152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3153 IEM_MC_ASSIGN(u64Src, u64Imm);
3154 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3155 IEM_MC_FETCH_EFLAGS(EFlags);
3156 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3157 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3158 else
3159 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3160
3161 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3162 IEM_MC_COMMIT_EFLAGS(EFlags);
3163 IEM_MC_ADVANCE_RIP();
3164 IEM_MC_END();
3165 }
3166 break;
3167 }
3168 }
3169 return VINF_SUCCESS;
3170}
3171
3172
3173/**
3174 * @opcode 0x82
3175 * @opmnemonic grp1_82
3176 * @opgroup og_groups
3177 */
3178FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3179{
3180 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3181 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3182}
3183
3184
3185/**
3186 * @opcode 0x83
3187 */
3188FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3189{
3190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3191 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3192 {
3193 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3194 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3195 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3196 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3197 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3198 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3199 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3200 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3201 }
3202 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3203 to the 386 even if absent in the intel reference manuals and some
3204 3rd party opcode listings. */
3205 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3206
3207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3208 {
3209 /*
3210 * Register target
3211 */
3212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3213 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3214 switch (pVCpu->iem.s.enmEffOpSize)
3215 {
3216 case IEMMODE_16BIT:
3217 {
3218 IEM_MC_BEGIN(3, 0);
3219 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3220 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3221 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3222
3223 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3224 IEM_MC_REF_EFLAGS(pEFlags);
3225 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3226
3227 IEM_MC_ADVANCE_RIP();
3228 IEM_MC_END();
3229 break;
3230 }
3231
3232 case IEMMODE_32BIT:
3233 {
3234 IEM_MC_BEGIN(3, 0);
3235 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3236 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3237 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3238
3239 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3240 IEM_MC_REF_EFLAGS(pEFlags);
3241 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3242 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3243
3244 IEM_MC_ADVANCE_RIP();
3245 IEM_MC_END();
3246 break;
3247 }
3248
3249 case IEMMODE_64BIT:
3250 {
3251 IEM_MC_BEGIN(3, 0);
3252 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3253 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3255
3256 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3257 IEM_MC_REF_EFLAGS(pEFlags);
3258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3259
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 break;
3263 }
3264 }
3265 }
3266 else
3267 {
3268 /*
3269 * Memory target.
3270 */
3271 uint32_t fAccess;
3272 if (pImpl->pfnLockedU16)
3273 fAccess = IEM_ACCESS_DATA_RW;
3274 else /* CMP */
3275 fAccess = IEM_ACCESS_DATA_R;
3276
3277 switch (pVCpu->iem.s.enmEffOpSize)
3278 {
3279 case IEMMODE_16BIT:
3280 {
3281 IEM_MC_BEGIN(3, 2);
3282 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3283 IEM_MC_ARG(uint16_t, u16Src, 1);
3284 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3286
3287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3288 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3289 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3290 if (pImpl->pfnLockedU16)
3291 IEMOP_HLP_DONE_DECODING();
3292 else
3293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3294 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3295 IEM_MC_FETCH_EFLAGS(EFlags);
3296 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3298 else
3299 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3300
3301 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3302 IEM_MC_COMMIT_EFLAGS(EFlags);
3303 IEM_MC_ADVANCE_RIP();
3304 IEM_MC_END();
3305 break;
3306 }
3307
3308 case IEMMODE_32BIT:
3309 {
3310 IEM_MC_BEGIN(3, 2);
3311 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3312 IEM_MC_ARG(uint32_t, u32Src, 1);
3313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3315
3316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3317 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3318 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3319 if (pImpl->pfnLockedU32)
3320 IEMOP_HLP_DONE_DECODING();
3321 else
3322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3323 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3324 IEM_MC_FETCH_EFLAGS(EFlags);
3325 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3326 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3327 else
3328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3329
3330 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3331 IEM_MC_COMMIT_EFLAGS(EFlags);
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 break;
3335 }
3336
3337 case IEMMODE_64BIT:
3338 {
3339 IEM_MC_BEGIN(3, 2);
3340 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3341 IEM_MC_ARG(uint64_t, u64Src, 1);
3342 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3344
3345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3346 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3347 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3348 if (pImpl->pfnLockedU64)
3349 IEMOP_HLP_DONE_DECODING();
3350 else
3351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3352 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3353 IEM_MC_FETCH_EFLAGS(EFlags);
3354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3355 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3356 else
3357 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3358
3359 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3360 IEM_MC_COMMIT_EFLAGS(EFlags);
3361 IEM_MC_ADVANCE_RIP();
3362 IEM_MC_END();
3363 break;
3364 }
3365 }
3366 }
3367 return VINF_SUCCESS;
3368}
3369
3370
3371/**
3372 * @opcode 0x84
3373 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Decoding/operand handling is shared with the other Eb,Gb binary ops;
       only the arithmetic worker table differs. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3380
3381
3382/**
3383 * @opcode 0x85
3384 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Decoding/operand handling is shared with the other Ev,Gv binary ops;
       only the arithmetic worker table differs. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3391
3392
3393/**
3394 * @opcode 0x86
3395 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register form: fetch both, store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  XCHG with a memory operand is implicitly
         * locked, so the swap goes through the mapped-memory worker.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3442
3443
3444/**
3445 * @opcode 0x87
3446 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register form: fetch both, store them crosswise.  Note the
           32-bit stores implicitly zero the high halves of the 64-bit regs. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  XCHG with a memory operand is implicitly
         * locked, so the swap goes through the mapped-memory worker.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written via reference, so the implicit
                   high-dword clearing must be done explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3567
3568
3569/**
3570 * @opcode 0x88
3571 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: plain reg-to-reg copy. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3610
3611
3612/**
3613 * @opcode 0x89
3614 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: plain reg-to-reg copy at the effective
           operand size (32-bit store zeroes the upper 32 bits). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3703
3704
3705/**
3706 * @opcode 0x8a
3707 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: plain reg-to-reg copy. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3744
3745
3746/**
3747 * @opcode 0x8b
3748 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: plain reg-to-reg copy at the effective operand
           size (32-bit store zeroes the upper 32 bits). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3837
3838
3839/**
3840 * opcode 0x63
3841 * @todo Table fixme
3842 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* Opcode 0x63 is mode dependent: ARPL outside long mode, MOVSXD in
       long mode (and effectively a plain 32-bit MOV without REX.W). */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
3851
3852
3853/**
3854 * @opcode 0x8c
3855 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists.  The REX.R prefix is ignored.
     * (The reg field selects the segment register: ES..GS are 0..5, 6 and 7
     * do not exist.)
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3928
3929
3930
3931
3932/**
3933 * @opcode 0x8d
3934 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    /* LEA only computes the effective address; no memory access is made.
       The result is truncated to the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3980
3981
3982/**
3983 * @opcode 0x8e
3984 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  (Loading CS this way is
     * invalid; reg values 6 and 7 don't name a segment register at all.)
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Loading a segment register goes through the CImpl worker since it
     * involves descriptor-table access and permission checks.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4038
4039
4040/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument biases
       rSP by the pop size so the address is computed as if the increment
       had already happened. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop into a temp and
       store to memory; rSP is only committed on full success so a faulting
       store leaves the guest state untouched. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4134
4135
4136/**
4137 * @opcode 0x8f
4138 */
4139FNIEMOP_DEF(iemOp_Grp1A__xop)
4140{
4141 /*
4142 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4143 * three byte VEX prefix, except that the mmmmm field cannot have the values
4144 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4145 */
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4148 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4149
4150 IEMOP_MNEMONIC(xop, "xop");
4151 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4152 {
4153 /** @todo Test when exctly the XOP conformance checks kick in during
4154 * instruction decoding and fetching (using \#PF). */
4155 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4156 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4157 if ( ( pVCpu->iem.s.fPrefixes
4158 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4159 == 0)
4160 {
4161 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4162 if (bXop2 & 0x80 /* XOP.W */)
4163 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4164 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
4165 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4166 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4167 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4168 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4169 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4170
4171 /** @todo XOP: Just use new tables and decoders. */
4172 switch (bRm & 0x1f)
4173 {
4174 case 8: /* xop opcode map 8. */
4175 IEMOP_BITCH_ABOUT_STUB();
4176 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4177
4178 case 9: /* xop opcode map 9. */
4179 IEMOP_BITCH_ABOUT_STUB();
4180 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4181
4182 case 10: /* xop opcode map 10. */
4183 IEMOP_BITCH_ABOUT_STUB();
4184 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4185
4186 default:
4187 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4188 return IEMOP_RAISE_INVALID_OPCODE();
4189 }
4190 }
4191 else
4192 Log(("XOP: Invalid prefix mix!\n"));
4193 }
4194 else
4195 Log(("XOP: XOP support disabled!\n"));
4196 return IEMOP_RAISE_INVALID_OPCODE();
4197}
4198
4199
4200/**
4201 * Common 'xchg reg,rAX' helper.
4202 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Apply REX.B to the register index (this is how 0x90..0x97 reaches
       r8..r15). */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4249
4250
4251/**
4252 * @opcode 0x90
4253 */
4254FNIEMOP_DEF(iemOp_nop)
4255{
4256 /* R8/R8D and RAX/EAX can be exchanged. */
4257 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4258 {
4259 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4260 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4261 }
4262
4263 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4264 IEMOP_MNEMONIC(pause, "pause");
4265 else
4266 IEMOP_MNEMONIC(nop, "nop");
4267 IEM_MC_BEGIN(0, 0);
4268 IEM_MC_ADVANCE_RIP();
4269 IEM_MC_END();
4270 return VINF_SUCCESS;
4271}
4272
4273
4274/**
4275 * @opcode 0x91
4276 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    /* Shared worker handles all operand sizes and applies REX.B. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4282
4283
4284/**
4285 * @opcode 0x92
4286 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    /* Shared worker handles all operand sizes and applies REX.B. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4292
4293
4294/**
4295 * @opcode 0x93
4296 */
4297FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4298{
4299 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4300 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4301}
4302
4303
4304/**
4305 * @opcode 0x94
4306 */
4307FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4308{
4309 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4310 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4311}
4312
4313
4314/**
4315 * @opcode 0x95
4316 */
4317FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4318{
4319 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4320 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4321}
4322
4323
4324/**
4325 * @opcode 0x96
4326 */
4327FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4328{
4329 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4330 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4331}
4332
4333
4334/**
4335 * @opcode 0x97
4336 */
4337FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4338{
4339 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4340 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4341}
4342
4343
4344/**
4345 * @opcode 0x98
4346 */
4347FNIEMOP_DEF(iemOp_cbw)
4348{
4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4350 switch (pVCpu->iem.s.enmEffOpSize)
4351 {
4352 case IEMMODE_16BIT:
4353 IEMOP_MNEMONIC(cbw, "cbw");
4354 IEM_MC_BEGIN(0, 1);
4355 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4356 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4357 } IEM_MC_ELSE() {
4358 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4359 } IEM_MC_ENDIF();
4360 IEM_MC_ADVANCE_RIP();
4361 IEM_MC_END();
4362 return VINF_SUCCESS;
4363
4364 case IEMMODE_32BIT:
4365 IEMOP_MNEMONIC(cwde, "cwde");
4366 IEM_MC_BEGIN(0, 1);
4367 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4368 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4369 } IEM_MC_ELSE() {
4370 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4371 } IEM_MC_ENDIF();
4372 IEM_MC_ADVANCE_RIP();
4373 IEM_MC_END();
4374 return VINF_SUCCESS;
4375
4376 case IEMMODE_64BIT:
4377 IEMOP_MNEMONIC(cdqe, "cdqe");
4378 IEM_MC_BEGIN(0, 1);
4379 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4380 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4381 } IEM_MC_ELSE() {
4382 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4383 } IEM_MC_ENDIF();
4384 IEM_MC_ADVANCE_RIP();
4385 IEM_MC_END();
4386 return VINF_SUCCESS;
4387
4388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4389 }
4390}
4391
4392
4393/**
4394 * @opcode 0x99
4395 */
4396FNIEMOP_DEF(iemOp_cwd)
4397{
4398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4399 switch (pVCpu->iem.s.enmEffOpSize)
4400 {
4401 case IEMMODE_16BIT:
4402 IEMOP_MNEMONIC(cwd, "cwd");
4403 IEM_MC_BEGIN(0, 1);
4404 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4405 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4406 } IEM_MC_ELSE() {
4407 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4408 } IEM_MC_ENDIF();
4409 IEM_MC_ADVANCE_RIP();
4410 IEM_MC_END();
4411 return VINF_SUCCESS;
4412
4413 case IEMMODE_32BIT:
4414 IEMOP_MNEMONIC(cdq, "cdq");
4415 IEM_MC_BEGIN(0, 1);
4416 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4417 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4418 } IEM_MC_ELSE() {
4419 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4420 } IEM_MC_ENDIF();
4421 IEM_MC_ADVANCE_RIP();
4422 IEM_MC_END();
4423 return VINF_SUCCESS;
4424
4425 case IEMMODE_64BIT:
4426 IEMOP_MNEMONIC(cqo, "cqo");
4427 IEM_MC_BEGIN(0, 1);
4428 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4429 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4430 } IEM_MC_ELSE() {
4431 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4432 } IEM_MC_ENDIF();
4433 IEM_MC_ADVANCE_RIP();
4434 IEM_MC_END();
4435 return VINF_SUCCESS;
4436
4437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4438 }
4439}
4440
4441
4442/**
4443 * @opcode 0x9a
4444 */
4445FNIEMOP_DEF(iemOp_call_Ap)
4446{
4447 IEMOP_MNEMONIC(call_Ap, "call Ap");
4448 IEMOP_HLP_NO_64BIT();
4449
4450 /* Decode the far pointer address and pass it on to the far call C implementation. */
4451 uint32_t offSeg;
4452 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4453 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4454 else
4455 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4456 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4458 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4459}
4460
4461
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 FPU exceptions / device-not-available conditions
 * and otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4475
4476
4477/**
4478 * @opcode 0x9c
4479 */
4480FNIEMOP_DEF(iemOp_pushf_Fv)
4481{
4482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4483 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4484 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4485}
4486
4487
4488/**
4489 * @opcode 0x9d
4490 */
4491FNIEMOP_DEF(iemOp_popf_Fv)
4492{
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4495 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4496}
4497
4498
4499/**
4500 * @opcode 0x9e
4501 */
4502FNIEMOP_DEF(iemOp_sahf)
4503{
4504 IEMOP_MNEMONIC(sahf, "sahf");
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4506 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4507 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4508 return IEMOP_RAISE_INVALID_OPCODE();
4509 IEM_MC_BEGIN(0, 2);
4510 IEM_MC_LOCAL(uint32_t, u32Flags);
4511 IEM_MC_LOCAL(uint32_t, EFlags);
4512 IEM_MC_FETCH_EFLAGS(EFlags);
4513 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4514 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4515 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4516 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4517 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4518 IEM_MC_COMMIT_EFLAGS(EFlags);
4519 IEM_MC_ADVANCE_RIP();
4520 IEM_MC_END();
4521 return VINF_SUCCESS;
4522}
4523
4524
4525/**
4526 * @opcode 0x9f
4527 */
4528FNIEMOP_DEF(iemOp_lahf)
4529{
4530 IEMOP_MNEMONIC(lahf, "lahf");
4531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4532 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4533 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4534 return IEMOP_RAISE_INVALID_OPCODE();
4535 IEM_MC_BEGIN(0, 1);
4536 IEM_MC_LOCAL(uint8_t, u8Flags);
4537 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4538 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4539 IEM_MC_ADVANCE_RIP();
4540 IEM_MC_END();
4541 return VINF_SUCCESS;
4542}
4543
4544
4545/**
4546 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4547 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4548 * prefixes. Will return on failures.
4549 * @param a_GCPtrMemOff The variable to store the offset in.
4550 */
4551#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4552 do \
4553 { \
4554 switch (pVCpu->iem.s.enmEffAddrMode) \
4555 { \
4556 case IEMMODE_16BIT: \
4557 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4558 break; \
4559 case IEMMODE_32BIT: \
4560 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4561 break; \
4562 case IEMMODE_64BIT: \
4563 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4564 break; \
4565 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4566 } \
4567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4568 } while (0)
4569
4570/**
4571 * @opcode 0xa0
4572 */
4573FNIEMOP_DEF(iemOp_mov_AL_Ob)
4574{
4575 /*
4576 * Get the offset and fend of lock prefixes.
4577 */
4578 RTGCPTR GCPtrMemOff;
4579 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4580
4581 /*
4582 * Fetch AL.
4583 */
4584 IEM_MC_BEGIN(0,1);
4585 IEM_MC_LOCAL(uint8_t, u8Tmp);
4586 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4587 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4588 IEM_MC_ADVANCE_RIP();
4589 IEM_MC_END();
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/**
4595 * @opcode 0xa1
4596 */
4597FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4598{
4599 /*
4600 * Get the offset and fend of lock prefixes.
4601 */
4602 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4603 RTGCPTR GCPtrMemOff;
4604 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4605
4606 /*
4607 * Fetch rAX.
4608 */
4609 switch (pVCpu->iem.s.enmEffOpSize)
4610 {
4611 case IEMMODE_16BIT:
4612 IEM_MC_BEGIN(0,1);
4613 IEM_MC_LOCAL(uint16_t, u16Tmp);
4614 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4615 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4616 IEM_MC_ADVANCE_RIP();
4617 IEM_MC_END();
4618 return VINF_SUCCESS;
4619
4620 case IEMMODE_32BIT:
4621 IEM_MC_BEGIN(0,1);
4622 IEM_MC_LOCAL(uint32_t, u32Tmp);
4623 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4624 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4625 IEM_MC_ADVANCE_RIP();
4626 IEM_MC_END();
4627 return VINF_SUCCESS;
4628
4629 case IEMMODE_64BIT:
4630 IEM_MC_BEGIN(0,1);
4631 IEM_MC_LOCAL(uint64_t, u64Tmp);
4632 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4633 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4634 IEM_MC_ADVANCE_RIP();
4635 IEM_MC_END();
4636 return VINF_SUCCESS;
4637
4638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4639 }
4640}
4641
4642
4643/**
4644 * @opcode 0xa2
4645 */
4646FNIEMOP_DEF(iemOp_mov_Ob_AL)
4647{
4648 /*
4649 * Get the offset and fend of lock prefixes.
4650 */
4651 RTGCPTR GCPtrMemOff;
4652 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4653
4654 /*
4655 * Store AL.
4656 */
4657 IEM_MC_BEGIN(0,1);
4658 IEM_MC_LOCAL(uint8_t, u8Tmp);
4659 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4660 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4661 IEM_MC_ADVANCE_RIP();
4662 IEM_MC_END();
4663 return VINF_SUCCESS;
4664}
4665
4666
4667/**
4668 * @opcode 0xa3
4669 */
4670FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4671{
4672 /*
4673 * Get the offset and fend of lock prefixes.
4674 */
4675 RTGCPTR GCPtrMemOff;
4676 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4677
4678 /*
4679 * Store rAX.
4680 */
4681 switch (pVCpu->iem.s.enmEffOpSize)
4682 {
4683 case IEMMODE_16BIT:
4684 IEM_MC_BEGIN(0,1);
4685 IEM_MC_LOCAL(uint16_t, u16Tmp);
4686 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4687 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 return VINF_SUCCESS;
4691
4692 case IEMMODE_32BIT:
4693 IEM_MC_BEGIN(0,1);
4694 IEM_MC_LOCAL(uint32_t, u32Tmp);
4695 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4696 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 return VINF_SUCCESS;
4700
4701 case IEMMODE_64BIT:
4702 IEM_MC_BEGIN(0,1);
4703 IEM_MC_LOCAL(uint64_t, u64Tmp);
4704 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4706 IEM_MC_ADVANCE_RIP();
4707 IEM_MC_END();
4708 return VINF_SUCCESS;
4709
4710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4711 }
4712}
4713
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Implements one unrepeated MOVS iteration: loads ValBits bits from
 * [effSeg:xSI], stores them to [ES:xDI], then steps both index registers
 * forward or backward by the element size according to EFLAGS.DF.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4732
4733/**
4734 * @opcode 0xa4
4735 */
4736FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4737{
4738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4739
4740 /*
4741 * Use the C implementation if a repeat prefix is encountered.
4742 */
4743 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4744 {
4745 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4746 switch (pVCpu->iem.s.enmEffAddrMode)
4747 {
4748 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4749 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4750 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4751 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4752 }
4753 }
4754 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4755
4756 /*
4757 * Sharing case implementation with movs[wdq] below.
4758 */
4759 switch (pVCpu->iem.s.enmEffAddrMode)
4760 {
4761 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4762 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4763 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4765 }
4766 return VINF_SUCCESS;
4767}
4768
4769
4770/**
4771 * @opcode 0xa5
4772 */
4773FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4774{
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776
4777 /*
4778 * Use the C implementation if a repeat prefix is encountered.
4779 */
4780 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4781 {
4782 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4783 switch (pVCpu->iem.s.enmEffOpSize)
4784 {
4785 case IEMMODE_16BIT:
4786 switch (pVCpu->iem.s.enmEffAddrMode)
4787 {
4788 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4789 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4790 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4792 }
4793 break;
4794 case IEMMODE_32BIT:
4795 switch (pVCpu->iem.s.enmEffAddrMode)
4796 {
4797 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4798 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4799 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4801 }
4802 case IEMMODE_64BIT:
4803 switch (pVCpu->iem.s.enmEffAddrMode)
4804 {
4805 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4806 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4807 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4809 }
4810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4811 }
4812 }
4813 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4814
4815 /*
4816 * Annoying double switch here.
4817 * Using ugly macro for implementing the cases, sharing it with movsb.
4818 */
4819 switch (pVCpu->iem.s.enmEffOpSize)
4820 {
4821 case IEMMODE_16BIT:
4822 switch (pVCpu->iem.s.enmEffAddrMode)
4823 {
4824 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4825 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4826 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4828 }
4829 break;
4830
4831 case IEMMODE_32BIT:
4832 switch (pVCpu->iem.s.enmEffAddrMode)
4833 {
4834 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4835 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4836 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4838 }
4839 break;
4840
4841 case IEMMODE_64BIT:
4842 switch (pVCpu->iem.s.enmEffAddrMode)
4843 {
4844 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4845 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4846 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4848 }
4849 break;
4850 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4851 }
4852 return VINF_SUCCESS;
4853}
4854
4855#undef IEM_MOVS_CASE
4856
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Implements one unrepeated CMPS iteration: loads ValBits bits from both
 * [effSeg:xSI] and [ES:xDI], runs the compare worker (setting EFLAGS as a
 * SUB would, result discarded), then steps both index registers forward or
 * backward by the element size according to EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4883
4884/**
4885 * @opcode 0xa6
4886 */
4887FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4888{
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890
4891 /*
4892 * Use the C implementation if a repeat prefix is encountered.
4893 */
4894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4895 {
4896 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4897 switch (pVCpu->iem.s.enmEffAddrMode)
4898 {
4899 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4900 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4901 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4903 }
4904 }
4905 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4906 {
4907 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4908 switch (pVCpu->iem.s.enmEffAddrMode)
4909 {
4910 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4911 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4912 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4914 }
4915 }
4916 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4917
4918 /*
4919 * Sharing case implementation with cmps[wdq] below.
4920 */
4921 switch (pVCpu->iem.s.enmEffAddrMode)
4922 {
4923 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4924 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4925 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4927 }
4928 return VINF_SUCCESS;
4929
4930}
4931
4932
4933/**
4934 * @opcode 0xa7
4935 */
4936FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4937{
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939
4940 /*
4941 * Use the C implementation if a repeat prefix is encountered.
4942 */
4943 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4944 {
4945 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4946 switch (pVCpu->iem.s.enmEffOpSize)
4947 {
4948 case IEMMODE_16BIT:
4949 switch (pVCpu->iem.s.enmEffAddrMode)
4950 {
4951 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4952 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4953 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4955 }
4956 break;
4957 case IEMMODE_32BIT:
4958 switch (pVCpu->iem.s.enmEffAddrMode)
4959 {
4960 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4961 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4962 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4963 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4964 }
4965 case IEMMODE_64BIT:
4966 switch (pVCpu->iem.s.enmEffAddrMode)
4967 {
4968 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4969 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4970 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4972 }
4973 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4974 }
4975 }
4976
4977 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4978 {
4979 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4980 switch (pVCpu->iem.s.enmEffOpSize)
4981 {
4982 case IEMMODE_16BIT:
4983 switch (pVCpu->iem.s.enmEffAddrMode)
4984 {
4985 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4986 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4987 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4989 }
4990 break;
4991 case IEMMODE_32BIT:
4992 switch (pVCpu->iem.s.enmEffAddrMode)
4993 {
4994 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4995 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4996 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4998 }
4999 case IEMMODE_64BIT:
5000 switch (pVCpu->iem.s.enmEffAddrMode)
5001 {
5002 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5003 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5004 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5006 }
5007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5008 }
5009 }
5010
5011 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5012
5013 /*
5014 * Annoying double switch here.
5015 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5016 */
5017 switch (pVCpu->iem.s.enmEffOpSize)
5018 {
5019 case IEMMODE_16BIT:
5020 switch (pVCpu->iem.s.enmEffAddrMode)
5021 {
5022 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5023 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5024 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5026 }
5027 break;
5028
5029 case IEMMODE_32BIT:
5030 switch (pVCpu->iem.s.enmEffAddrMode)
5031 {
5032 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5033 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5034 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5036 }
5037 break;
5038
5039 case IEMMODE_64BIT:
5040 switch (pVCpu->iem.s.enmEffAddrMode)
5041 {
5042 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5043 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5044 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5046 }
5047 break;
5048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5049 }
5050 return VINF_SUCCESS;
5051
5052}
5053
5054#undef IEM_CMPS_CASE
5055
5056/**
5057 * @opcode 0xa8
5058 */
5059FNIEMOP_DEF(iemOp_test_AL_Ib)
5060{
5061 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5062 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5063 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5064}
5065
5066
5067/**
5068 * @opcode 0xa9
5069 */
5070FNIEMOP_DEF(iemOp_test_eAX_Iz)
5071{
5072 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5073 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5074 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5075}
5076
5077
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Implements one unrepeated STOS iteration: stores the low ValBits bits of
 * rAX to [ES:xDI], then steps xDI forward or backward by the element size
 * according to EFLAGS.DF.  No segment override applies to STOS.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5093
5094/**
5095 * @opcode 0xaa
5096 */
5097FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5098{
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100
5101 /*
5102 * Use the C implementation if a repeat prefix is encountered.
5103 */
5104 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5105 {
5106 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5107 switch (pVCpu->iem.s.enmEffAddrMode)
5108 {
5109 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5110 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5111 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5113 }
5114 }
5115 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5116
5117 /*
5118 * Sharing case implementation with stos[wdq] below.
5119 */
5120 switch (pVCpu->iem.s.enmEffAddrMode)
5121 {
5122 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5123 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5124 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5126 }
5127 return VINF_SUCCESS;
5128}
5129
5130
5131/**
5132 * @opcode 0xab
5133 */
5134FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5135{
5136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5137
5138 /*
5139 * Use the C implementation if a repeat prefix is encountered.
5140 */
5141 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5142 {
5143 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5144 switch (pVCpu->iem.s.enmEffOpSize)
5145 {
5146 case IEMMODE_16BIT:
5147 switch (pVCpu->iem.s.enmEffAddrMode)
5148 {
5149 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5150 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5151 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5153 }
5154 break;
5155 case IEMMODE_32BIT:
5156 switch (pVCpu->iem.s.enmEffAddrMode)
5157 {
5158 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5159 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5160 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5162 }
5163 case IEMMODE_64BIT:
5164 switch (pVCpu->iem.s.enmEffAddrMode)
5165 {
5166 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5167 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5168 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5170 }
5171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5172 }
5173 }
5174 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5175
5176 /*
5177 * Annoying double switch here.
5178 * Using ugly macro for implementing the cases, sharing it with stosb.
5179 */
5180 switch (pVCpu->iem.s.enmEffOpSize)
5181 {
5182 case IEMMODE_16BIT:
5183 switch (pVCpu->iem.s.enmEffAddrMode)
5184 {
5185 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5186 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5187 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5189 }
5190 break;
5191
5192 case IEMMODE_32BIT:
5193 switch (pVCpu->iem.s.enmEffAddrMode)
5194 {
5195 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5196 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5197 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5199 }
5200 break;
5201
5202 case IEMMODE_64BIT:
5203 switch (pVCpu->iem.s.enmEffAddrMode)
5204 {
5205 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5206 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5207 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5209 }
5210 break;
5211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5212 }
5213 return VINF_SUCCESS;
5214}
5215
5216#undef IEM_STOS_CASE
5217
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Implements one unrepeated LODS iteration: loads ValBits bits from
 * [effSeg:xSI] into the low part of rAX, then steps xSI forward or backward
 * by the element size according to EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5233
5234/**
5235 * @opcode 0xac
5236 */
5237FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5238{
5239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5240
5241 /*
5242 * Use the C implementation if a repeat prefix is encountered.
5243 */
5244 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5245 {
5246 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5247 switch (pVCpu->iem.s.enmEffAddrMode)
5248 {
5249 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5250 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5251 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5253 }
5254 }
5255 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5256
5257 /*
5258 * Sharing case implementation with stos[wdq] below.
5259 */
5260 switch (pVCpu->iem.s.enmEffAddrMode)
5261 {
5262 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5263 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5264 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5266 }
5267 return VINF_SUCCESS;
5268}
5269
5270
5271/**
5272 * @opcode 0xad
5273 */
5274FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5275{
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5277
5278 /*
5279 * Use the C implementation if a repeat prefix is encountered.
5280 */
5281 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5282 {
5283 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5284 switch (pVCpu->iem.s.enmEffOpSize)
5285 {
5286 case IEMMODE_16BIT:
5287 switch (pVCpu->iem.s.enmEffAddrMode)
5288 {
5289 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5290 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5291 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5293 }
5294 break;
5295 case IEMMODE_32BIT:
5296 switch (pVCpu->iem.s.enmEffAddrMode)
5297 {
5298 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5299 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5300 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5301 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5302 }
5303 case IEMMODE_64BIT:
5304 switch (pVCpu->iem.s.enmEffAddrMode)
5305 {
5306 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5307 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5308 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5310 }
5311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5312 }
5313 }
5314 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5315
5316 /*
5317 * Annoying double switch here.
5318 * Using ugly macro for implementing the cases, sharing it with lodsb.
5319 */
5320 switch (pVCpu->iem.s.enmEffOpSize)
5321 {
5322 case IEMMODE_16BIT:
5323 switch (pVCpu->iem.s.enmEffAddrMode)
5324 {
5325 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5326 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5327 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5329 }
5330 break;
5331
5332 case IEMMODE_32BIT:
5333 switch (pVCpu->iem.s.enmEffAddrMode)
5334 {
5335 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5336 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5337 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5339 }
5340 break;
5341
5342 case IEMMODE_64BIT:
5343 switch (pVCpu->iem.s.enmEffAddrMode)
5344 {
5345 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5346 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5347 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5349 }
5350 break;
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5352 }
5353 return VINF_SUCCESS;
5354}
5355
5356#undef IEM_LODS_CASE
5357
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeating SCAS case: compares xAX (ValBits wide) against the
 * value at es:[xDI] (AddrBits wide addressing; ES is architecturally fixed
 * for scas), updating EFLAGS via the cmp worker, then steps xDI by
 * ValBits/8 backward or forward according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5379
5380/**
5381 * @opcode 0xae
5382 */
5383FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5384{
5385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5386
5387 /*
5388 * Use the C implementation if a repeat prefix is encountered.
5389 */
5390 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5391 {
5392 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5393 switch (pVCpu->iem.s.enmEffAddrMode)
5394 {
5395 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5396 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5397 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5399 }
5400 }
5401 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5402 {
5403 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5404 switch (pVCpu->iem.s.enmEffAddrMode)
5405 {
5406 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5407 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5408 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5410 }
5411 }
5412 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5413
5414 /*
5415 * Sharing case implementation with stos[wdq] below.
5416 */
5417 switch (pVCpu->iem.s.enmEffAddrMode)
5418 {
5419 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5420 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5421 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5423 }
5424 return VINF_SUCCESS;
5425}
5426
5427
5428/**
5429 * @opcode 0xaf
5430 */
5431FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5432{
5433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5434
5435 /*
5436 * Use the C implementation if a repeat prefix is encountered.
5437 */
5438 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5439 {
5440 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5441 switch (pVCpu->iem.s.enmEffOpSize)
5442 {
5443 case IEMMODE_16BIT:
5444 switch (pVCpu->iem.s.enmEffAddrMode)
5445 {
5446 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5447 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5448 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5450 }
5451 break;
5452 case IEMMODE_32BIT:
5453 switch (pVCpu->iem.s.enmEffAddrMode)
5454 {
5455 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5456 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5457 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5459 }
5460 case IEMMODE_64BIT:
5461 switch (pVCpu->iem.s.enmEffAddrMode)
5462 {
5463 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5464 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5465 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5467 }
5468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5469 }
5470 }
5471 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5472 {
5473 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5474 switch (pVCpu->iem.s.enmEffOpSize)
5475 {
5476 case IEMMODE_16BIT:
5477 switch (pVCpu->iem.s.enmEffAddrMode)
5478 {
5479 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5480 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5481 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5483 }
5484 break;
5485 case IEMMODE_32BIT:
5486 switch (pVCpu->iem.s.enmEffAddrMode)
5487 {
5488 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5489 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5490 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5492 }
5493 case IEMMODE_64BIT:
5494 switch (pVCpu->iem.s.enmEffAddrMode)
5495 {
5496 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5497 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5498 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5500 }
5501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5502 }
5503 }
5504 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5505
5506 /*
5507 * Annoying double switch here.
5508 * Using ugly macro for implementing the cases, sharing it with scasb.
5509 */
5510 switch (pVCpu->iem.s.enmEffOpSize)
5511 {
5512 case IEMMODE_16BIT:
5513 switch (pVCpu->iem.s.enmEffAddrMode)
5514 {
5515 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5516 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5517 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5519 }
5520 break;
5521
5522 case IEMMODE_32BIT:
5523 switch (pVCpu->iem.s.enmEffAddrMode)
5524 {
5525 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5526 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5527 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5529 }
5530 break;
5531
5532 case IEMMODE_64BIT:
5533 switch (pVCpu->iem.s.enmEffAddrMode)
5534 {
5535 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5536 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5537 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5539 }
5540 break;
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 return VINF_SUCCESS;
5544}
5545
5546#undef IEM_SCAS_CASE
5547
5548/**
5549 * Common 'mov r8, imm8' helper.
5550 */
5551FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5552{
5553 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555
5556 IEM_MC_BEGIN(0, 1);
5557 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5558 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5559 IEM_MC_ADVANCE_RIP();
5560 IEM_MC_END();
5561
5562 return VINF_SUCCESS;
5563}
5564
5565
5566/**
5567 * @opcode 0xb0
5568 */
5569FNIEMOP_DEF(iemOp_mov_AL_Ib)
5570{
5571 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5572 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5573}
5574
5575
5576/**
5577 * @opcode 0xb1
5578 */
5579FNIEMOP_DEF(iemOp_CL_Ib)
5580{
5581 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5582 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5583}
5584
5585
5586/**
5587 * @opcode 0xb2
5588 */
5589FNIEMOP_DEF(iemOp_DL_Ib)
5590{
5591 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5592 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5593}
5594
5595
5596/**
5597 * @opcode 0xb3
5598 */
5599FNIEMOP_DEF(iemOp_BL_Ib)
5600{
5601 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5602 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5603}
5604
5605
5606/**
5607 * @opcode 0xb4
5608 */
5609FNIEMOP_DEF(iemOp_mov_AH_Ib)
5610{
5611 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5612 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5613}
5614
5615
5616/**
5617 * @opcode 0xb5
5618 */
5619FNIEMOP_DEF(iemOp_CH_Ib)
5620{
5621 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5622 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5623}
5624
5625
5626/**
5627 * @opcode 0xb6
5628 */
5629FNIEMOP_DEF(iemOp_DH_Ib)
5630{
5631 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5632 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5633}
5634
5635
5636/**
5637 * @opcode 0xb7
5638 */
5639FNIEMOP_DEF(iemOp_BH_Ib)
5640{
5641 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5642 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5643}
5644
5645
5646/**
5647 * Common 'mov regX,immX' helper.
5648 */
5649FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5650{
5651 switch (pVCpu->iem.s.enmEffOpSize)
5652 {
5653 case IEMMODE_16BIT:
5654 {
5655 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5657
5658 IEM_MC_BEGIN(0, 1);
5659 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5660 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 break;
5664 }
5665
5666 case IEMMODE_32BIT:
5667 {
5668 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5670
5671 IEM_MC_BEGIN(0, 1);
5672 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5673 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5674 IEM_MC_ADVANCE_RIP();
5675 IEM_MC_END();
5676 break;
5677 }
5678 case IEMMODE_64BIT:
5679 {
5680 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5682
5683 IEM_MC_BEGIN(0, 1);
5684 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5685 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5686 IEM_MC_ADVANCE_RIP();
5687 IEM_MC_END();
5688 break;
5689 }
5690 }
5691
5692 return VINF_SUCCESS;
5693}
5694
5695
5696/**
5697 * @opcode 0xb8
5698 */
5699FNIEMOP_DEF(iemOp_eAX_Iv)
5700{
5701 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5702 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5703}
5704
5705
5706/**
5707 * @opcode 0xb9
5708 */
5709FNIEMOP_DEF(iemOp_eCX_Iv)
5710{
5711 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5712 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5713}
5714
5715
5716/**
5717 * @opcode 0xba
5718 */
5719FNIEMOP_DEF(iemOp_eDX_Iv)
5720{
5721 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5722 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5723}
5724
5725
5726/**
5727 * @opcode 0xbb
5728 */
5729FNIEMOP_DEF(iemOp_eBX_Iv)
5730{
5731 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5732 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5733}
5734
5735
5736/**
5737 * @opcode 0xbc
5738 */
5739FNIEMOP_DEF(iemOp_eSP_Iv)
5740{
5741 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5742 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5743}
5744
5745
5746/**
5747 * @opcode 0xbd
5748 */
5749FNIEMOP_DEF(iemOp_eBP_Iv)
5750{
5751 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5752 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5753}
5754
5755
5756/**
5757 * @opcode 0xbe
5758 */
5759FNIEMOP_DEF(iemOp_eSI_Iv)
5760{
5761 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5762 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5763}
5764
5765
5766/**
5767 * @opcode 0xbf
5768 */
5769FNIEMOP_DEF(iemOp_eDI_Iv)
5770{
5771 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5772 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5773}
5774
5775
5776/**
5777 * @opcode 0xc0
5778 */
5779FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5780{
5781 IEMOP_HLP_MIN_186();
5782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5783 PCIEMOPSHIFTSIZES pImpl;
5784 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5785 {
5786 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5787 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5788 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5789 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5790 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5791 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5792 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5793 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5794 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5795 }
5796 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5797
5798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5799 {
5800 /* register */
5801 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5803 IEM_MC_BEGIN(3, 0);
5804 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5805 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5806 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5807 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5808 IEM_MC_REF_EFLAGS(pEFlags);
5809 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5810 IEM_MC_ADVANCE_RIP();
5811 IEM_MC_END();
5812 }
5813 else
5814 {
5815 /* memory */
5816 IEM_MC_BEGIN(3, 2);
5817 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5818 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5819 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5821
5822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5823 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5824 IEM_MC_ASSIGN(cShiftArg, cShift);
5825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5826 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5827 IEM_MC_FETCH_EFLAGS(EFlags);
5828 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5829
5830 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5831 IEM_MC_COMMIT_EFLAGS(EFlags);
5832 IEM_MC_ADVANCE_RIP();
5833 IEM_MC_END();
5834 }
5835 return VINF_SUCCESS;
5836}
5837
5838
5839/**
5840 * @opcode 0xc1
5841 */
5842FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
5843{
5844 IEMOP_HLP_MIN_186();
5845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5846 PCIEMOPSHIFTSIZES pImpl;
5847 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5848 {
5849 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
5850 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
5851 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
5852 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
5853 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
5854 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
5855 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
5856 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5857 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5858 }
5859 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5860
5861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5862 {
5863 /* register */
5864 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 switch (pVCpu->iem.s.enmEffOpSize)
5867 {
5868 case IEMMODE_16BIT:
5869 IEM_MC_BEGIN(3, 0);
5870 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5871 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5872 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5873 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5874 IEM_MC_REF_EFLAGS(pEFlags);
5875 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5876 IEM_MC_ADVANCE_RIP();
5877 IEM_MC_END();
5878 return VINF_SUCCESS;
5879
5880 case IEMMODE_32BIT:
5881 IEM_MC_BEGIN(3, 0);
5882 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5883 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5884 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5885 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5886 IEM_MC_REF_EFLAGS(pEFlags);
5887 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5888 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5889 IEM_MC_ADVANCE_RIP();
5890 IEM_MC_END();
5891 return VINF_SUCCESS;
5892
5893 case IEMMODE_64BIT:
5894 IEM_MC_BEGIN(3, 0);
5895 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5896 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5897 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5898 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5899 IEM_MC_REF_EFLAGS(pEFlags);
5900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5901 IEM_MC_ADVANCE_RIP();
5902 IEM_MC_END();
5903 return VINF_SUCCESS;
5904
5905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5906 }
5907 }
5908 else
5909 {
5910 /* memory */
5911 switch (pVCpu->iem.s.enmEffOpSize)
5912 {
5913 case IEMMODE_16BIT:
5914 IEM_MC_BEGIN(3, 2);
5915 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5916 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5917 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5919
5920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5921 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5922 IEM_MC_ASSIGN(cShiftArg, cShift);
5923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5924 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5925 IEM_MC_FETCH_EFLAGS(EFlags);
5926 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5927
5928 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5929 IEM_MC_COMMIT_EFLAGS(EFlags);
5930 IEM_MC_ADVANCE_RIP();
5931 IEM_MC_END();
5932 return VINF_SUCCESS;
5933
5934 case IEMMODE_32BIT:
5935 IEM_MC_BEGIN(3, 2);
5936 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5937 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5938 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5940
5941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5942 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5943 IEM_MC_ASSIGN(cShiftArg, cShift);
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5946 IEM_MC_FETCH_EFLAGS(EFlags);
5947 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5948
5949 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5950 IEM_MC_COMMIT_EFLAGS(EFlags);
5951 IEM_MC_ADVANCE_RIP();
5952 IEM_MC_END();
5953 return VINF_SUCCESS;
5954
5955 case IEMMODE_64BIT:
5956 IEM_MC_BEGIN(3, 2);
5957 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5958 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5959 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5961
5962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5963 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5964 IEM_MC_ASSIGN(cShiftArg, cShift);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5967 IEM_MC_FETCH_EFLAGS(EFlags);
5968 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5969
5970 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5971 IEM_MC_COMMIT_EFLAGS(EFlags);
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 return VINF_SUCCESS;
5975
5976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5977 }
5978 }
5979}
5980
5981
5982/**
5983 * @opcode 0xc2
5984 */
5985FNIEMOP_DEF(iemOp_retn_Iw)
5986{
5987 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
5988 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5990 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5991 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
5992}
5993
5994
5995/**
5996 * @opcode 0xc3
5997 */
5998FNIEMOP_DEF(iemOp_retn)
5999{
6000 IEMOP_MNEMONIC(retn, "retn");
6001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6003 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6004}
6005
6006
6007/**
6008 * @opcode 0xc4
6009 */
6010FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
6011{
6012 /* The LES instruction is invalid 64-bit mode. In legacy and
6013 compatability mode it is invalid with MOD=3.
6014 The use as a VEX prefix is made possible by assigning the inverted
6015 REX.R to the top MOD bit, and the top bit in the inverted register
6016 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6017 to accessing registers 0..7 in this VEX form. */
6018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6019 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6020 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6021 {
6022 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6023 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6024 {
6025 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6026 if ( ( pVCpu->iem.s.fPrefixes
6027 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6028 == 0)
6029 {
6030 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6031 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6032 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6033 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6034 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6035
6036 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6037 }
6038
6039 Log(("VEX2: Invalid prefix mix!\n"));
6040 }
6041 else
6042 Log(("VEX2: AVX support disabled!\n"));
6043
6044 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6045 return IEMOP_RAISE_INVALID_OPCODE();
6046 }
6047 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6048 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6049}
6050
6051
6052/**
6053 * @opcode 0xc5
6054 */
6055FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6056{
6057 /* The LDS instruction is invalid 64-bit mode. In legacy and
6058 compatability mode it is invalid with MOD=3.
6059 The use as a VEX prefix is made possible by assigning the inverted
6060 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6061 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6063 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6064 {
6065 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6066 {
6067 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6068 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6069 }
6070 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6071 }
6072
6073 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6074 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6075 {
6076 /** @todo Test when exctly the VEX conformance checks kick in during
6077 * instruction decoding and fetching (using \#PF). */
6078 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6079 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6080 if ( ( pVCpu->iem.s.fPrefixes
6081 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6082 == 0)
6083 {
6084 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6085 if (bVex2 & 0x80 /* VEX.W */)
6086 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6087 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6088 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6089 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6090 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6091 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6092 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6093
6094 switch (bRm & 0x1f)
6095 {
6096 case 1: /* 0x0f lead opcode byte. */
6097 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6098
6099 case 2: /* 0x0f 0x38 lead opcode bytes. */
6100 /** @todo VEX: Just use new tables and decoders. */
6101 IEMOP_BITCH_ABOUT_STUB();
6102 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6103
6104 case 3: /* 0x0f 0x3a lead opcode bytes. */
6105 /** @todo VEX: Just use new tables and decoders. */
6106 IEMOP_BITCH_ABOUT_STUB();
6107 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6108
6109 default:
6110 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6111 return IEMOP_RAISE_INVALID_OPCODE();
6112 }
6113 }
6114 else
6115 Log(("VEX3: Invalid prefix mix!\n"));
6116 }
6117 else
6118 Log(("VEX3: AVX support disabled!\n"));
6119 return IEMOP_RAISE_INVALID_OPCODE();
6120}
6121
6122
6123/**
6124 * @opcode 0xc6
6125 */
6126FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6127{
6128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6129 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6130 return IEMOP_RAISE_INVALID_OPCODE();
6131 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6132
6133 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6134 {
6135 /* register access */
6136 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6138 IEM_MC_BEGIN(0, 0);
6139 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6140 IEM_MC_ADVANCE_RIP();
6141 IEM_MC_END();
6142 }
6143 else
6144 {
6145 /* memory access. */
6146 IEM_MC_BEGIN(0, 1);
6147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6149 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6151 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6152 IEM_MC_ADVANCE_RIP();
6153 IEM_MC_END();
6154 }
6155 return VINF_SUCCESS;
6156}
6157
6158
6159/**
6160 * @opcode 0xc7
6161 */
6162FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6163{
6164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6165 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6166 return IEMOP_RAISE_INVALID_OPCODE();
6167 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6168
6169 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6170 {
6171 /* register access */
6172 switch (pVCpu->iem.s.enmEffOpSize)
6173 {
6174 case IEMMODE_16BIT:
6175 IEM_MC_BEGIN(0, 0);
6176 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6178 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6179 IEM_MC_ADVANCE_RIP();
6180 IEM_MC_END();
6181 return VINF_SUCCESS;
6182
6183 case IEMMODE_32BIT:
6184 IEM_MC_BEGIN(0, 0);
6185 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6187 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6188 IEM_MC_ADVANCE_RIP();
6189 IEM_MC_END();
6190 return VINF_SUCCESS;
6191
6192 case IEMMODE_64BIT:
6193 IEM_MC_BEGIN(0, 0);
6194 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6196 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6197 IEM_MC_ADVANCE_RIP();
6198 IEM_MC_END();
6199 return VINF_SUCCESS;
6200
6201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6202 }
6203 }
6204 else
6205 {
6206 /* memory access. */
6207 switch (pVCpu->iem.s.enmEffOpSize)
6208 {
6209 case IEMMODE_16BIT:
6210 IEM_MC_BEGIN(0, 1);
6211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6213 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6215 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6216 IEM_MC_ADVANCE_RIP();
6217 IEM_MC_END();
6218 return VINF_SUCCESS;
6219
6220 case IEMMODE_32BIT:
6221 IEM_MC_BEGIN(0, 1);
6222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6224 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6226 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6227 IEM_MC_ADVANCE_RIP();
6228 IEM_MC_END();
6229 return VINF_SUCCESS;
6230
6231 case IEMMODE_64BIT:
6232 IEM_MC_BEGIN(0, 1);
6233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6235 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6237 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6238 IEM_MC_ADVANCE_RIP();
6239 IEM_MC_END();
6240 return VINF_SUCCESS;
6241
6242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6243 }
6244 }
6245}
6246
6247
6248
6249
6250/**
6251 * @opcode 0xc8
6252 */
6253FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6254{
6255 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6256 IEMOP_HLP_MIN_186();
6257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6258 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6259 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6261 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6262}
6263
6264
6265/**
6266 * @opcode 0xc9
6267 */
6268FNIEMOP_DEF(iemOp_leave)
6269{
6270 IEMOP_MNEMONIC(leave, "leave");
6271 IEMOP_HLP_MIN_186();
6272 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6274 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6275}
6276
6277
6278/**
6279 * @opcode 0xca
6280 */
6281FNIEMOP_DEF(iemOp_retf_Iw)
6282{
6283 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6284 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6287 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6288}
6289
6290
6291/**
6292 * @opcode 0xcb
6293 */
6294FNIEMOP_DEF(iemOp_retf)
6295{
6296 IEMOP_MNEMONIC(retf, "retf");
6297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6298 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6299 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6300}
6301
6302
6303/**
6304 * @opcode 0xcc
6305 */
6306FNIEMOP_DEF(iemOp_int3)
6307{
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
6310}
6311
6312
6313/**
6314 * @opcode 0xcd
6315 */
6316FNIEMOP_DEF(iemOp_int_Ib)
6317{
6318 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6320 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
6321}
6322
6323
6324/**
6325 * @opcode 0xce
6326 */
6327FNIEMOP_DEF(iemOp_into)
6328{
6329 IEMOP_MNEMONIC(into, "into");
6330 IEMOP_HLP_NO_64BIT();
6331
6332 IEM_MC_BEGIN(2, 0);
6333 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6334 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
6335 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338}
6339
6340
6341/**
6342 * @opcode 0xcf
6343 */
6344FNIEMOP_DEF(iemOp_iret)
6345{
6346 IEMOP_MNEMONIC(iret, "iret");
6347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6348 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6349}
6350
6351
6352/**
6353 * @opcode 0xd0
6354 */
6355FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6356{
6357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6358 PCIEMOPSHIFTSIZES pImpl;
6359 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6360 {
6361 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6362 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6363 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6364 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6365 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6366 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6367 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6368 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6369 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6370 }
6371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6372
6373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6374 {
6375 /* register */
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377 IEM_MC_BEGIN(3, 0);
6378 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6379 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6380 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6381 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6382 IEM_MC_REF_EFLAGS(pEFlags);
6383 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6384 IEM_MC_ADVANCE_RIP();
6385 IEM_MC_END();
6386 }
6387 else
6388 {
6389 /* memory */
6390 IEM_MC_BEGIN(3, 2);
6391 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6392 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6393 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6395
6396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6398 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6399 IEM_MC_FETCH_EFLAGS(EFlags);
6400 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6401
6402 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6403 IEM_MC_COMMIT_EFLAGS(EFlags);
6404 IEM_MC_ADVANCE_RIP();
6405 IEM_MC_END();
6406 }
6407 return VINF_SUCCESS;
6408}
6409
6410
6411
6412/**
6413 * @opcode 0xd1
6414 */
6415FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6416{
6417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6418 PCIEMOPSHIFTSIZES pImpl;
6419 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6420 {
6421 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6422 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6423 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6424 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6425 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6426 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6427 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6428 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6429 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6430 }
6431 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6432
6433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6434 {
6435 /* register */
6436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6437 switch (pVCpu->iem.s.enmEffOpSize)
6438 {
6439 case IEMMODE_16BIT:
6440 IEM_MC_BEGIN(3, 0);
6441 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6442 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6443 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6444 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6445 IEM_MC_REF_EFLAGS(pEFlags);
6446 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6447 IEM_MC_ADVANCE_RIP();
6448 IEM_MC_END();
6449 return VINF_SUCCESS;
6450
6451 case IEMMODE_32BIT:
6452 IEM_MC_BEGIN(3, 0);
6453 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6454 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6455 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6456 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6457 IEM_MC_REF_EFLAGS(pEFlags);
6458 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6459 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6460 IEM_MC_ADVANCE_RIP();
6461 IEM_MC_END();
6462 return VINF_SUCCESS;
6463
6464 case IEMMODE_64BIT:
6465 IEM_MC_BEGIN(3, 0);
6466 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6467 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6468 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6469 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6470 IEM_MC_REF_EFLAGS(pEFlags);
6471 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6472 IEM_MC_ADVANCE_RIP();
6473 IEM_MC_END();
6474 return VINF_SUCCESS;
6475
6476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6477 }
6478 }
6479 else
6480 {
6481 /* memory */
6482 switch (pVCpu->iem.s.enmEffOpSize)
6483 {
6484 case IEMMODE_16BIT:
6485 IEM_MC_BEGIN(3, 2);
6486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6487 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6488 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6490
6491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6493 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6494 IEM_MC_FETCH_EFLAGS(EFlags);
6495 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6496
6497 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6498 IEM_MC_COMMIT_EFLAGS(EFlags);
6499 IEM_MC_ADVANCE_RIP();
6500 IEM_MC_END();
6501 return VINF_SUCCESS;
6502
6503 case IEMMODE_32BIT:
6504 IEM_MC_BEGIN(3, 2);
6505 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6506 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6507 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6509
6510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6513 IEM_MC_FETCH_EFLAGS(EFlags);
6514 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6515
6516 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6517 IEM_MC_COMMIT_EFLAGS(EFlags);
6518 IEM_MC_ADVANCE_RIP();
6519 IEM_MC_END();
6520 return VINF_SUCCESS;
6521
6522 case IEMMODE_64BIT:
6523 IEM_MC_BEGIN(3, 2);
6524 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6525 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6526 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6528
6529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6531 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6532 IEM_MC_FETCH_EFLAGS(EFlags);
6533 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6534
6535 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6536 IEM_MC_COMMIT_EFLAGS(EFlags);
6537 IEM_MC_ADVANCE_RIP();
6538 IEM_MC_END();
6539 return VINF_SUCCESS;
6540
6541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6542 }
6543 }
6544}
6545
6546
6547/**
6548 * @opcode 0xd2
6549 */
6550FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6551{
6552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6553 PCIEMOPSHIFTSIZES pImpl;
6554 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6555 {
6556 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6557 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6558 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6559 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6560 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6561 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6562 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6563 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6564 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6565 }
6566 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6567
6568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6569 {
6570 /* register */
6571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6572 IEM_MC_BEGIN(3, 0);
6573 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6574 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6575 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6576 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6577 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6578 IEM_MC_REF_EFLAGS(pEFlags);
6579 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6580 IEM_MC_ADVANCE_RIP();
6581 IEM_MC_END();
6582 }
6583 else
6584 {
6585 /* memory */
6586 IEM_MC_BEGIN(3, 2);
6587 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6588 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6589 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6591
6592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6595 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6596 IEM_MC_FETCH_EFLAGS(EFlags);
6597 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6598
6599 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6600 IEM_MC_COMMIT_EFLAGS(EFlags);
6601 IEM_MC_ADVANCE_RIP();
6602 IEM_MC_END();
6603 }
6604 return VINF_SUCCESS;
6605}
6606
6607
6608/**
6609 * @opcode 0xd3
6610 */
6611FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6612{
6613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6614 PCIEMOPSHIFTSIZES pImpl;
6615 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6616 {
6617 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6618 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6619 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6620 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6621 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6622 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6623 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6624 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6626 }
6627 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6628
6629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6630 {
6631 /* register */
6632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6633 switch (pVCpu->iem.s.enmEffOpSize)
6634 {
6635 case IEMMODE_16BIT:
6636 IEM_MC_BEGIN(3, 0);
6637 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6638 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6640 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6641 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6642 IEM_MC_REF_EFLAGS(pEFlags);
6643 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6644 IEM_MC_ADVANCE_RIP();
6645 IEM_MC_END();
6646 return VINF_SUCCESS;
6647
6648 case IEMMODE_32BIT:
6649 IEM_MC_BEGIN(3, 0);
6650 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6651 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6652 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6653 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6654 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6655 IEM_MC_REF_EFLAGS(pEFlags);
6656 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6657 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 return VINF_SUCCESS;
6661
6662 case IEMMODE_64BIT:
6663 IEM_MC_BEGIN(3, 0);
6664 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6665 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6666 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6667 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6668 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6669 IEM_MC_REF_EFLAGS(pEFlags);
6670 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6671 IEM_MC_ADVANCE_RIP();
6672 IEM_MC_END();
6673 return VINF_SUCCESS;
6674
6675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6676 }
6677 }
6678 else
6679 {
6680 /* memory */
6681 switch (pVCpu->iem.s.enmEffOpSize)
6682 {
6683 case IEMMODE_16BIT:
6684 IEM_MC_BEGIN(3, 2);
6685 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6686 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6687 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6689
6690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6692 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6693 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6694 IEM_MC_FETCH_EFLAGS(EFlags);
6695 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6696
6697 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6698 IEM_MC_COMMIT_EFLAGS(EFlags);
6699 IEM_MC_ADVANCE_RIP();
6700 IEM_MC_END();
6701 return VINF_SUCCESS;
6702
6703 case IEMMODE_32BIT:
6704 IEM_MC_BEGIN(3, 2);
6705 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6706 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6707 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6709
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6713 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6714 IEM_MC_FETCH_EFLAGS(EFlags);
6715 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6716
6717 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6718 IEM_MC_COMMIT_EFLAGS(EFlags);
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722
6723 case IEMMODE_64BIT:
6724 IEM_MC_BEGIN(3, 2);
6725 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6726 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6727 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6729
6730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6732 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6733 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6734 IEM_MC_FETCH_EFLAGS(EFlags);
6735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6736
6737 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6738 IEM_MC_COMMIT_EFLAGS(EFlags);
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742
6743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6744 }
6745 }
6746}
6747
6748/**
6749 * @opcode 0xd4
6750 */
6751FNIEMOP_DEF(iemOp_aam_Ib)
6752{
6753 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6754 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6756 IEMOP_HLP_NO_64BIT();
6757 if (!bImm)
6758 return IEMOP_RAISE_DIVIDE_ERROR();
6759 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6760}
6761
6762
6763/**
6764 * @opcode 0xd5
6765 */
6766FNIEMOP_DEF(iemOp_aad_Ib)
6767{
6768 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6769 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6771 IEMOP_HLP_NO_64BIT();
6772 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6773}
6774
6775
6776/**
6777 * @opcode 0xd6
6778 */
6779FNIEMOP_DEF(iemOp_salc)
6780{
6781 IEMOP_MNEMONIC(salc, "salc");
6782 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6783 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6785 IEMOP_HLP_NO_64BIT();
6786
6787 IEM_MC_BEGIN(0, 0);
6788 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6789 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6790 } IEM_MC_ELSE() {
6791 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6792 } IEM_MC_ENDIF();
6793 IEM_MC_ADVANCE_RIP();
6794 IEM_MC_END();
6795 return VINF_SUCCESS;
6796}
6797
6798
6799/**
6800 * @opcode 0xd7
6801 */
6802FNIEMOP_DEF(iemOp_xlat)
6803{
6804 IEMOP_MNEMONIC(xlat, "xlat");
6805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6806 switch (pVCpu->iem.s.enmEffAddrMode)
6807 {
6808 case IEMMODE_16BIT:
6809 IEM_MC_BEGIN(2, 0);
6810 IEM_MC_LOCAL(uint8_t, u8Tmp);
6811 IEM_MC_LOCAL(uint16_t, u16Addr);
6812 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
6813 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
6814 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
6815 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6816 IEM_MC_ADVANCE_RIP();
6817 IEM_MC_END();
6818 return VINF_SUCCESS;
6819
6820 case IEMMODE_32BIT:
6821 IEM_MC_BEGIN(2, 0);
6822 IEM_MC_LOCAL(uint8_t, u8Tmp);
6823 IEM_MC_LOCAL(uint32_t, u32Addr);
6824 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
6825 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
6826 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
6827 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6828 IEM_MC_ADVANCE_RIP();
6829 IEM_MC_END();
6830 return VINF_SUCCESS;
6831
6832 case IEMMODE_64BIT:
6833 IEM_MC_BEGIN(2, 0);
6834 IEM_MC_LOCAL(uint8_t, u8Tmp);
6835 IEM_MC_LOCAL(uint64_t, u64Addr);
6836 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
6837 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
6838 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
6839 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6840 IEM_MC_ADVANCE_RIP();
6841 IEM_MC_END();
6842 return VINF_SUCCESS;
6843
6844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6845 }
6846}
6847
6848
6849/**
6850 * Common worker for FPU instructions working on ST0 and STn, and storing the
6851 * result in ST0.
6852 *
6853 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6854 */
6855FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6856{
6857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6858
6859 IEM_MC_BEGIN(3, 1);
6860 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6861 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6862 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6863 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6864
6865 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6866 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6867 IEM_MC_PREPARE_FPU_USAGE();
6868 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6869 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
6870 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6871 IEM_MC_ELSE()
6872 IEM_MC_FPU_STACK_UNDERFLOW(0);
6873 IEM_MC_ENDIF();
6874 IEM_MC_ADVANCE_RIP();
6875
6876 IEM_MC_END();
6877 return VINF_SUCCESS;
6878}
6879
6880
6881/**
6882 * Common worker for FPU instructions working on ST0 and STn, and only affecting
6883 * flags.
6884 *
6885 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6886 */
6887FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6888{
6889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6890
6891 IEM_MC_BEGIN(3, 1);
6892 IEM_MC_LOCAL(uint16_t, u16Fsw);
6893 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6894 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6895 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6896
6897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6899 IEM_MC_PREPARE_FPU_USAGE();
6900 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6901 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6902 IEM_MC_UPDATE_FSW(u16Fsw);
6903 IEM_MC_ELSE()
6904 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6905 IEM_MC_ENDIF();
6906 IEM_MC_ADVANCE_RIP();
6907
6908 IEM_MC_END();
6909 return VINF_SUCCESS;
6910}
6911
6912
6913/**
6914 * Common worker for FPU instructions working on ST0 and STn, only affecting
6915 * flags, and popping when done.
6916 *
6917 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6918 */
6919FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6920{
6921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6922
6923 IEM_MC_BEGIN(3, 1);
6924 IEM_MC_LOCAL(uint16_t, u16Fsw);
6925 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6926 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6927 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6928
6929 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6930 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6931 IEM_MC_PREPARE_FPU_USAGE();
6932 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6933 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6934 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
6935 IEM_MC_ELSE()
6936 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
6937 IEM_MC_ENDIF();
6938 IEM_MC_ADVANCE_RIP();
6939
6940 IEM_MC_END();
6941 return VINF_SUCCESS;
6942}
6943
6944
/** Opcode 0xd8 11/0.
 * fadd st0,stN - ST0 += STn, result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6951
6952
/** Opcode 0xd8 11/1.
 * fmul st0,stN - ST0 *= STn, result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6959
6960
/** Opcode 0xd8 11/2.
 * fcom st0,stN - compares ST0 with STn; flags only, nothing stored. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6967
6968
/** Opcode 0xd8 11/3.
 * fcomp st0,stN - compares ST0 with STn and pops; reuses the fcom assembly
 * worker, the popping is done by the _pop helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6975
6976
/** Opcode 0xd8 11/4.
 * fsub st0,stN - ST0 -= STn, result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6983
6984
/** Opcode 0xd8 11/5.
 * fsubr st0,stN - reversed subtract (ST0 = STn - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6991
6992
/** Opcode 0xd8 11/6.
 * fdiv st0,stN - ST0 /= STn, result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6999
7000
/** Opcode 0xd8 11/7.
 * fdivr st0,stN - reversed divide (ST0 = STn / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7007
7008
7009/**
7010 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7011 * the result in ST0.
7012 *
7013 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7014 */
7015FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7016{
7017 IEM_MC_BEGIN(3, 3);
7018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7019 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7020 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7021 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7022 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7023 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7024
7025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7027
7028 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7029 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7030 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7031
7032 IEM_MC_PREPARE_FPU_USAGE();
7033 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7034 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7035 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7036 IEM_MC_ELSE()
7037 IEM_MC_FPU_STACK_UNDERFLOW(0);
7038 IEM_MC_ENDIF();
7039 IEM_MC_ADVANCE_RIP();
7040
7041 IEM_MC_END();
7042 return VINF_SUCCESS;
7043}
7044
7045
/** Opcode 0xd8 !11/0.
 * fadd st0,m32r - ST0 += 32-bit real from memory, via the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7052
7053
/** Opcode 0xd8 !11/1.
 * fmul st0,m32r - ST0 *= 32-bit real from memory, via the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7060
7061
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r - compares ST0 with a 32-bit real from memory; flags only,
 * nothing stored.  Open-coded (no common worker) because the FSW update
 * records the memory operand for exception reporting. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7094
7095
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r - like fcom st0,m32r but pops ST0 afterwards; uses the
 * _THEN_POP variants of the FSW update / underflow macros. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7128
7129
/** Opcode 0xd8 !11/4.
 * fsub st0,m32r - ST0 -= 32-bit real from memory, via the common worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7136
7137
/** Opcode 0xd8 !11/5.
 * fsubr st0,m32r - reversed subtract (ST0 = m32r - ST0), via the common
 * worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7144
7145
/** Opcode 0xd8 !11/6.
 * fdiv st0,m32r - ST0 /= 32-bit real from memory, via the common worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7152
7153
/** Opcode 0xd8 !11/7.
 * fdivr st0,m32r - reversed divide (ST0 = m32r / ST0), via the common
 * worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7160
7161
7162/**
7163 * @opcode 0xd8
7164 */
7165FNIEMOP_DEF(iemOp_EscF0)
7166{
7167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7168 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7169
7170 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7171 {
7172 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7173 {
7174 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7175 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7176 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7177 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7178 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7179 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7180 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7181 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7183 }
7184 }
7185 else
7186 {
7187 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7188 {
7189 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7190 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7191 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7192 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7193 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7194 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7195 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7196 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 }
7200}
7201
7202
/** Opcode 0xd9 /0 mem32real
 * fld m32r - pushes a 32-bit real from memory onto the FPU stack (converted
 * to 80-bit).  Pushing requires the register that will become the new ST0
 * (i.e. current ST7) to be empty; otherwise the push-overflow path is taken.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 becomes the new top after the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7235
7236
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r - stores ST0 to memory as a 32-bit real (no pop).  On an empty
 * ST0 with the invalid-operation exception masked (FCW.IM), a negative QNaN
 * is written instead; either way the underflow is recorded in the FSW. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Only commit the store if the conversion didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-operation: store a QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7271
7272
/** Opcode 0xd9 !11/3
 * FSTP m32real: store ST(0) to memory as a 32-bit float, then pop.
 * Identical to iemOp_fst_m32r except for the ..._THEN_POP FSW updates. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE() /* ST(0) empty: stack underflow. */
        IEM_MC_IF_FCW_IM() /* #IS masked: write the indefinite (negative) QNaN instead. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7307
7308
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment (CW, SW, TW, FPUIP, FPUDP,
 * FOP) from memory; the 14/28-byte layout depends on the operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The heavy lifting (parsing both env layouts) is done by the C worker. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7326
7327
7328/** Opcode 0xd9 !11/5 */
7329FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7330{
7331 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7332 IEM_MC_BEGIN(1, 1);
7333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7334 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7338 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7339 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7340 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7341 IEM_MC_END();
7342 return VINF_SUCCESS;
7343}
7344
7345
7346/** Opcode 0xd9 !11/6 */
7347FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7348{
7349 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7350 IEM_MC_BEGIN(3, 0);
7351 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7352 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7353 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7356 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7357 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7358 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7359 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7360 IEM_MC_END();
7361 return VINF_SUCCESS;
7362}
7363
7364
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7382
7383
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: FPU no-operation. Still raises #NM/#MF as appropriate and updates
 * FOP/FPUIP. (The decoder below only routes 0xd9 0xd0 here; whether the
 * 0xd8-0xdf aliases mentioned above also behave as FNOP is unverified.) */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7401
7402
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register index comes from the low three r/m bits. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE() /* Source empty: push underflow handling. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7430
7431
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange the contents of ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i) into ST(0) via the result, old ST(0) into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE() /* Either register empty: C worker handles the underflow. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7462
7463
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST(0) into ST(i), then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: nothing to copy, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST(0) to ST(i) and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7510
7511
7512/**
7513 * Common worker for FPU instructions working on ST0 and replaces it with the
7514 * result, i.e. unary operators.
7515 *
7516 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7517 */
7518FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7519{
7520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7521
7522 IEM_MC_BEGIN(2, 1);
7523 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7524 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7525 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7526
7527 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7528 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7529 IEM_MC_PREPARE_FPU_USAGE();
7530 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7531 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7532 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7533 IEM_MC_ELSE()
7534 IEM_MC_FPU_STACK_UNDERFLOW(0);
7535 IEM_MC_ENDIF();
7536 IEM_MC_ADVANCE_RIP();
7537
7538 IEM_MC_END();
7539 return VINF_SUCCESS;
7540}
7541
7542
/** Opcode 0xd9 0xe0. FCHS: negate ST(0) in place. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7549
7550
/** Opcode 0xd9 0xe1. FABS: clear the sign of ST(0) in place. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7557
7558
7559/**
7560 * Common worker for FPU instructions working on ST0 and only returns FSW.
7561 *
7562 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7563 */
7564FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7565{
7566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7567
7568 IEM_MC_BEGIN(2, 1);
7569 IEM_MC_LOCAL(uint16_t, u16Fsw);
7570 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7571 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7572
7573 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7574 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7575 IEM_MC_PREPARE_FPU_USAGE();
7576 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7577 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7578 IEM_MC_UPDATE_FSW(u16Fsw);
7579 IEM_MC_ELSE()
7580 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7581 IEM_MC_ENDIF();
7582 IEM_MC_ADVANCE_RIP();
7583
7584 IEM_MC_END();
7585 return VINF_SUCCESS;
7586}
7587
7588
/** Opcode 0xd9 0xe4. FTST: compare ST(0) against 0.0, setting C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7595
7596
/** Opcode 0xd9 0xe5. FXAM: classify ST(0) into C0/C2/C3 (and C1 = sign). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7603
7604
7605/**
7606 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7607 *
7608 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7609 */
7610FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7611{
7612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7613
7614 IEM_MC_BEGIN(1, 1);
7615 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7616 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7617
7618 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7619 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7620 IEM_MC_PREPARE_FPU_USAGE();
7621 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7622 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7623 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7624 IEM_MC_ELSE()
7625 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7626 IEM_MC_ENDIF();
7627 IEM_MC_ADVANCE_RIP();
7628
7629 IEM_MC_END();
7630 return VINF_SUCCESS;
7631}
7632
7633
/** Opcode 0xd9 0xe8. FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7640
7641
/** Opcode 0xd9 0xe9. FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7648
7649
/** Opcode 0xd9 0xea. FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7656
/** Opcode 0xd9 0xeb. FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7663
7664
/** Opcode 0xd9 0xec. FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7671
/** Opcode 0xd9 0xed. FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7678
7679
/** Opcode 0xd9 0xee. FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7686
7687
/** Opcode 0xd9 0xf0. F2XM1: replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7694
7695
7696/**
7697 * Common worker for FPU instructions working on STn and ST0, storing the result
7698 * in STn, and popping the stack unless IE, DE or ZE was raised.
7699 *
7700 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7701 */
7702FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7703{
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705
7706 IEM_MC_BEGIN(3, 1);
7707 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7708 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7709 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7710 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7711
7712 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7714
7715 IEM_MC_PREPARE_FPU_USAGE();
7716 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7717 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7718 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7719 IEM_MC_ELSE()
7720 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7721 IEM_MC_ENDIF();
7722 IEM_MC_ADVANCE_RIP();
7723
7724 IEM_MC_END();
7725 return VINF_SUCCESS;
7726}
7727
7728
/** Opcode 0xd9 0xf1. FYL2X: ST(1) = ST(1) * log2(ST(0)), pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7735
7736
7737/**
7738 * Common worker for FPU instructions working on ST0 and having two outputs, one
7739 * replacing ST0 and one pushed onto the stack.
7740 *
7741 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7742 */
7743FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7744{
7745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7746
7747 IEM_MC_BEGIN(2, 1);
7748 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7749 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7750 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7751
7752 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7753 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7754 IEM_MC_PREPARE_FPU_USAGE();
7755 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7756 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7757 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7758 IEM_MC_ELSE()
7759 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7760 IEM_MC_ENDIF();
7761 IEM_MC_ADVANCE_RIP();
7762
7763 IEM_MC_END();
7764 return VINF_SUCCESS;
7765}
7766
7767
/** Opcode 0xd9 0xf2. FPTAN: ST(0) = tan(ST(0)), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7774
7775
/** Opcode 0xd9 0xf3. FPATAN: ST(1) = atan(ST(1)/ST(0)), pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7782
7783
/** Opcode 0xd9 0xf4. FXTRACT: split ST(0) into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7790
7791
/** Opcode 0xd9 0xf5. FPREM1: IEEE partial remainder of ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7798
7799
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack TOP pointer (no data is moved). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* Clears C0-C3 (documented as undefined). */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7822
7823
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack TOP pointer (no data is moved). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* Clears C0-C3 (documented as undefined). */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7846
7847
/** Opcode 0xd9 0xf8. FPREM: partial remainder (truncating) of ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7854
7855
/** Opcode 0xd9 0xf9. FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7862
7863
/** Opcode 0xd9 0xfa. FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7870
7871
/** Opcode 0xd9 0xfb. FSINCOS: ST(0) = sin(ST(0)), then push cos of the input. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7878
7879
/** Opcode 0xd9 0xfc. FRNDINT: round ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7886
7887
/** Opcode 0xd9 0xfd. FSCALE: scale ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7894
7895
/** Opcode 0xd9 0xfe. FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7902
7903
/** Opcode 0xd9 0xff. FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7910
7911
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=11 and reg=4..7, i.e. the second opcode
 * bytes 0xe0 through 0xff; undefined encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7948
7949
7950/**
7951 * @opcode 0xd9
7952 */
7953FNIEMOP_DEF(iemOp_EscF1)
7954{
7955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7956 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
7957
7958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7959 {
7960 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7961 {
7962 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
7963 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
7964 case 2:
7965 if (bRm == 0xd0)
7966 return FNIEMOP_CALL(iemOp_fnop);
7967 return IEMOP_RAISE_INVALID_OPCODE();
7968 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
7969 case 4:
7970 case 5:
7971 case 6:
7972 case 7:
7973 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
7974 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
7975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7976 }
7977 }
7978 else
7979 {
7980 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7981 {
7982 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
7983 case 1: return IEMOP_RAISE_INVALID_OPCODE();
7984 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
7985 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
7986 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
7987 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
7988 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
7989 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
7990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7991 }
7992 }
7993}
7994
7995
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be valid regardless of the condition outcome. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8022
8023
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8050
8051
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copy ST(i) to ST(0) if CF or ZF is set (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8078
8079
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8106
8107
8108/**
8109 * Common worker for FPU instructions working on ST0 and STn, only affecting
8110 * flags, and popping twice when done.
8111 *
8112 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8113 */
8114FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8115{
8116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8117
8118 IEM_MC_BEGIN(3, 1);
8119 IEM_MC_LOCAL(uint16_t, u16Fsw);
8120 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8121 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8122 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8123
8124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8126
8127 IEM_MC_PREPARE_FPU_USAGE();
8128 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8129 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8130 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8131 IEM_MC_ELSE()
8132 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8133 IEM_MC_ENDIF();
8134 IEM_MC_ADVANCE_RIP();
8135
8136 IEM_MC_END();
8137 return VINF_SUCCESS;
8138}
8139
8140
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8147
8148
8149/**
8150 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8151 * the result in ST0.
8152 *
8153 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8154 */
8155FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8156{
8157 IEM_MC_BEGIN(3, 3);
8158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8159 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8160 IEM_MC_LOCAL(int32_t, i32Val2);
8161 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8162 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8163 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8164
8165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8167
8168 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8169 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8170 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8171
8172 IEM_MC_PREPARE_FPU_USAGE();
8173 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8174 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8175 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8176 IEM_MC_ELSE()
8177 IEM_MC_FPU_STACK_UNDERFLOW(0);
8178 IEM_MC_ENDIF();
8179 IEM_MC_ADVANCE_RIP();
8180
8181 IEM_MC_END();
8182 return VINF_SUCCESS;
8183}
8184
8185
/** Opcode 0xda !11/0. FIADD m32int: ST(0) += (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8192
8193
/** Opcode 0xda !11/1. FIMUL m32int: ST(0) *= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8200
8201
/** Opcode 0xda !11/2.
 * FICOM m32int: compare ST(0) with a 32-bit integer from memory; flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8234
8235
/** Opcode 0xda !11/3.
 * FICOMP m32int: like FICOM m32int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8268
8269
/** Opcode 0xda !11/4. FISUB m32int: ST(0) -= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8276
8277
/** Opcode 0xda !11/5. FISUBR m32int: ST(0) = (int32 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8284
8285
/** Opcode 0xda !11/6. FIDIV m32int: ST(0) /= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8292
8293
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: reversed division; shares the common st0/m32i worker. */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8300
8301
8302/**
8303 * @opcode 0xda
8304 */
8305FNIEMOP_DEF(iemOp_EscF2)
8306{
8307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8308 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8310 {
8311 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8312 {
8313 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8314 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8315 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8316 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8317 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8318 case 5:
8319 if (bRm == 0xe9)
8320 return FNIEMOP_CALL(iemOp_fucompp);
8321 return IEMOP_RAISE_INVALID_OPCODE();
8322 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8323 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8325 }
8326 }
8327 else
8328 {
8329 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8330 {
8331 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8332 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8333 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8334 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8335 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8336 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8337 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8338 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8340 }
8341 }
8342}
8343
8344
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: converts a 32-bit signed integer memory operand to
       an 80-bit real and pushes it onto the FPU register stack. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is the slot the push lands in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Destination slot occupied: stack overflow handling. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8376
8377
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int (SSE3): stores ST(0) to memory as a 32-bit integer using
       truncation regardless of the rounding mode in FCW, then pops ST(0). */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede
       any FPU state changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if invalid-op is masked, store the integer
           indefinite value; either way report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8412
8413
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: stores ST(0) to memory as a 32-bit integer (rounding per
       FCW); does not pop. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8448
8449
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32int: same as FIST m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite when IM is masked, then
           report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8484
8485
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: loads an 80-bit extended real from memory and pushes it. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is the slot the push lands in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8517
8518
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: stores ST(0) to memory as an 80-bit extended real and
       pops the register stack. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the negative QNaN (real indefinite) when IM is
           masked, then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8553
8554
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB ST(0),ST(i): copies ST(i) to ST(0) when EFLAGS.CF is clear. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) (source) and ST(0) (destination) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8581
8582
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE ST(0),ST(i): copies ST(i) to ST(0) when EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8609
8610
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE ST(0),ST(i): copies ST(i) to ST(0) when both CF and ZF are
       clear. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8637
8638
/** Opcode 0xdb 11/3.
 * Note: the Intel mnemonic is FCMOVNU (not-unordered); the double-n in the
 * local name is historical. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* FCMOVNU ST(0),ST(i): copies ST(i) to ST(0) when EFLAGS.PF is clear. */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8665
8666
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable instruction; treated as a no-op here
       (only the FPU-availability check and RIP advance happen). */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8678
8679
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable instruction; treated as a no-op here. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8691
8692
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clears the exception bits in FSW without checking for pending
       FPU exceptions first (the "no-wait" form). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Modifies FSW, so the FPU state must be brought in for changing. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8707
8708
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: re-initializes the FPU without checking for pending exceptions
       (fCheckXcpts=false); deferred to the C implementation. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8716
8717
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287 "set protected mode"; ignored (no-op) on later CPUs. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8729
8730
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL "return to real mode"; newer CPUs raise #UD, which is
       the behaviour emulated here (the ignore-variant is compiled out). */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8746
8747
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8754
8755
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI ST(0),ST(i): ordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8762
8763
8764/**
8765 * @opcode 0xdb
8766 */
8767FNIEMOP_DEF(iemOp_EscF3)
8768{
8769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8770 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8772 {
8773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8774 {
8775 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8776 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8777 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8778 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8779 case 4:
8780 switch (bRm)
8781 {
8782 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8783 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8784 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8785 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8786 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8787 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8788 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8789 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8791 }
8792 break;
8793 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8794 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8795 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8797 }
8798 }
8799 else
8800 {
8801 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8802 {
8803 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8804 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8805 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8806 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8807 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8808 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8809 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8810 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8811 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8812 }
8813 }
8814}
8815
8816
8817/**
8818 * Common worker for FPU instructions working on STn and ST0, and storing the
8819 * result in STn unless IE, DE or ZE was raised.
8820 *
8821 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8822 */
8823FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8824{
8825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8826
8827 IEM_MC_BEGIN(3, 1);
8828 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8829 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8830 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8831 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8832
8833 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8834 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8835
8836 IEM_MC_PREPARE_FPU_USAGE();
8837 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8838 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8839 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8840 IEM_MC_ELSE()
8841 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8842 IEM_MC_ENDIF();
8843 IEM_MC_ADVANCE_RIP();
8844
8845 IEM_MC_END();
8846 return VINF_SUCCESS;
8847}
8848
8849
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8856
8857
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8864
8865
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8872
8873
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8880
8881
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8888
8889
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0); shares the common stN/st0 worker. */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8896
8897
8898/**
8899 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8900 * memory operand, and storing the result in ST0.
8901 *
8902 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8903 */
8904FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8905{
8906 IEM_MC_BEGIN(3, 3);
8907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8908 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8909 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8910 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8911 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8912 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8913
8914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8916 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8917 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8918
8919 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8920 IEM_MC_PREPARE_FPU_USAGE();
8921 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8922 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8923 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8924 IEM_MC_ELSE()
8925 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8926 IEM_MC_ENDIF();
8927 IEM_MC_ADVANCE_RIP();
8928
8929 IEM_MC_END();
8930 return VINF_SUCCESS;
8931}
8932
8933
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real: ST(0) += m64real; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8940
8941
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real: ST(0) *= m64real; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8948
8949
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64real: compares ST(0) with a 64-bit real memory operand (result
       goes into the FSW condition bits); does not pop. */
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8982
8983
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64real: like FCOM m64real, but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9016
9017
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real: ST(0) -= m64real; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9024
9025
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real: reversed subtraction; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9032
9033
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real: ST(0) /= m64real; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9040
9041
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real: reversed division; shares the common ST0/m64r worker. */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9048
9049
9050/**
9051 * @opcode 0xdc
9052 */
9053FNIEMOP_DEF(iemOp_EscF4)
9054{
9055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9056 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9058 {
9059 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9060 {
9061 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9062 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9063 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9064 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9065 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9066 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9067 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9068 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9070 }
9071 }
9072 else
9073 {
9074 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9075 {
9076 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9077 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9078 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9079 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9080 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9081 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9082 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9083 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9085 }
9086 }
9087}
9088
9089
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /* FLD m64real: loads a 64-bit real from memory, converts it to an 80-bit
       real and pushes it onto the FPU register stack. */
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is the slot the push lands in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9121
9122
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* FISTTP m64int (SSE3): stores ST(0) to memory as a 64-bit integer using
       truncation regardless of the rounding mode in FCW, then pops ST(0). */
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite when IM is masked, then
           report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9157
9158
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* FST m64real: stores ST(0) to memory as a 64-bit real; does not pop. */
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the negative QNaN (real indefinite) when IM is
           masked, then report underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9193
9194
9195
9196
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* FSTP m64real: like FST m64real, but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the negative QNaN (real indefinite) when IM is
           masked, then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9231
9232
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the full x87 state from memory; the layout
 * depends on the effective operand size, hence enmEffOpSize is forwarded. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Whole FPU state is overwritten, so actualize for change (not just read). */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9250
9251
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full x87 state to memory (no pending-exception
 * check - that is the FSAVE wait-prefix variant's job); iemCImpl_fnsave also
 * reinitializes the FPU afterwards. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9270
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory (no wait/exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9295
9296
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register's tag as empty without touching TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9318
9319
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i) without popping; underflow is signalled
 * if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9344
9345
/** Opcode 0xdd 11/4.
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9352
9353
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9360
9361
/**
 * @opcode 0xdd
 * Escape byte 0xdd dispatcher: register forms (mod==3) vs memory forms.
 * Records the FPU opcode (FOP) before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9400
9401
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9408
9409
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): multiply, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9416
9417
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9424
9425
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtract into ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9432
9433
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract into ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9440
9441
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide into ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9448
9449
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide into ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9456
9457
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form, used for effective
 *                      address calculation).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 16-bit integer operand before touching the FPU state so a
       memory fault leaves the FPU untouched. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9493
9494
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) += (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9501
9502
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) *= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9509
9510
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with an int16 from memory; only FSW changes. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9543
9544
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9577
9578
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) -= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9585
9586
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST(0) = (int16 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9593
9594
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) /= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9601
9602
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST(0) = (int16 from memory) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9609
9610
/**
 * @opcode 0xde
 * Escape byte 0xde dispatcher: register forms (mod==3) vs m16int forms.
 * Records the FPU opcode (FOP) before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Reserved, behaves like FCOMP ST(i). */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9651
9652
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp
 * (frees ST(i), then increments TOP, effectively popping). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9674
9675
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX (no wait/exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9692
9693
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.
 * NOTE(review): this passes iemAImpl_fcomi_r80_by_r80, the same worker as
 * FCOMIP below - presumably intentional since FCOMI/FUCOMI only differ in
 * #IA signalling for QNaN operands; confirm the worker handles that. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9700
9701
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9708
9709
/** Opcode 0xdf !11/0.
 * FILD m16int: convert an int16 from memory to 80-bit real and push it.
 * Pushing requires the register one below TOP (ST(7)) to be empty. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9741
9742
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as int16 with truncation, pop.
 * On underflow with FCW.IM masked, the integer indefinite (INT16_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9777
9778
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as int16 (current rounding), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with #IA masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9813
9814
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as int16 (current rounding), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9849
9850
/** Opcode 0xdf !11/4.
 * FBLD m80bcd (load packed BCD) - not implemented yet, stub only. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9853
9854
/** Opcode 0xdf !11/5.
 * FILD m64int: convert an int64 from memory to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9886
9887
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd (store packed BCD and pop) - not implemented yet, stub only. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9890
9891
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as int64 (current rounding), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9926
9927
9928/**
9929 * @opcode 0xdf
9930 */
9931FNIEMOP_DEF(iemOp_EscF7)
9932{
9933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9935 {
9936 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9937 {
9938 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9939 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9940 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9941 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9942 case 4: if (bRm == 0xe0)
9943 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9944 return IEMOP_RAISE_INVALID_OPCODE();
9945 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9946 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9947 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9949 }
9950 }
9951 else
9952 {
9953 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9954 {
9955 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9956 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9957 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9958 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9959 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9960 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9961 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9962 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9963 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9964 }
9965 }
9966}
9967
9968
/**
 * @opcode 0xe0
 * LOOPNE/LOOPNZ rel8: decrement the count register ((r/e)CX selected by the
 * effective address size), branch when it is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10017
10018
/**
 * @opcode 0xe1
 * LOOPE/LOOPZ rel8: decrement the count register ((r/e)CX selected by the
 * effective address size), branch when it is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10067
10068
/**
 * @opcode 0xe2
 * LOOP rel8: decrement the count register and branch while non-zero.
 * A branch-to-self (offset == -instruction-length) is special-cased as an
 * infinite-loop shortcut: the count register is simply zeroed and execution
 * falls through, avoiding iterating the loop one decrement at a time.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - drain the counter in one go. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10144
10145
/**
 * @opcode 0xe3
 * JCXZ/JECXZ/JRCXZ rel8: branch when the count register (selected by the
 * effective address size) is zero; no flags involved.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10191
10192
/** Opcode 0xe4.
 * IN AL,imm8: read one byte from the immediate port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10201
10202
/** Opcode 0xe5.
 * IN eAX,imm8: read a word/dword (by operand size) from the immediate port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10211
10212
/** Opcode 0xe6.
 * OUT imm8,AL: write AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10221
10222
/** Opcode 0xe7 - word/dword output of AX/EAX to the immediate 8-bit port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size (2 or 4 bytes; 64-bit uses 4). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10231
10232
10233/**
10234 * @opcode 0xe8
10235 */
10236FNIEMOP_DEF(iemOp_call_Jv)
10237{
10238 IEMOP_MNEMONIC(call_Jv, "call Jv");
10239 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10240 switch (pVCpu->iem.s.enmEffOpSize)
10241 {
10242 case IEMMODE_16BIT:
10243 {
10244 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10245 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10246 }
10247
10248 case IEMMODE_32BIT:
10249 {
10250 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10251 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10252 }
10253
10254 case IEMMODE_64BIT:
10255 {
10256 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10257 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10258 }
10259
10260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10261 }
10262}
10263
10264
10265/**
10266 * @opcode 0xe9
10267 */
10268FNIEMOP_DEF(iemOp_jmp_Jv)
10269{
10270 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10271 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10272 switch (pVCpu->iem.s.enmEffOpSize)
10273 {
10274 case IEMMODE_16BIT:
10275 {
10276 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10277 IEM_MC_BEGIN(0, 0);
10278 IEM_MC_REL_JMP_S16(i16Imm);
10279 IEM_MC_END();
10280 return VINF_SUCCESS;
10281 }
10282
10283 case IEMMODE_64BIT:
10284 case IEMMODE_32BIT:
10285 {
10286 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10287 IEM_MC_BEGIN(0, 0);
10288 IEM_MC_REL_JMP_S32(i32Imm);
10289 IEM_MC_END();
10290 return VINF_SUCCESS;
10291 }
10292
10293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10294 }
10295}
10296
10297
10298/**
10299 * @opcode 0xea
10300 */
10301FNIEMOP_DEF(iemOp_jmp_Ap)
10302{
10303 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10304 IEMOP_HLP_NO_64BIT();
10305
10306 /* Decode the far pointer address and pass it on to the far call C implementation. */
10307 uint32_t offSeg;
10308 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10309 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10310 else
10311 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10312 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10314 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10315}
10316
10317
10318/**
10319 * @opcode 0xeb
10320 */
10321FNIEMOP_DEF(iemOp_jmp_Jb)
10322{
10323 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10324 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10326 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10327
10328 IEM_MC_BEGIN(0, 0);
10329 IEM_MC_REL_JMP_S8(i8Imm);
10330 IEM_MC_END();
10331 return VINF_SUCCESS;
10332}
10333
10334
/** Opcode 0xec - byte input from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10342
10343
/** Opcode 0xed - word/dword input from the port in DX into AX/EAX.
 * NOTE(review): the function name lacks the 'in_' prefix its siblings use
 * (iemOp_in_AL_DX etc.); renaming would require updating the opcode table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size (2 or 4 bytes; 64-bit uses 4). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10351
10352
/** Opcode 0xee - byte output of AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10360
10361
/** Opcode 0xef - word/dword output of AX/EAX to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size (2 or 4 bytes; 64-bit uses 4). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10369
10370
10371/**
10372 * @opcode 0xf0
10373 */
10374FNIEMOP_DEF(iemOp_lock)
10375{
10376 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10377 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10378
10379 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10380 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10381}
10382
10383
10384/**
10385 * @opcode 0xf1
10386 */
10387FNIEMOP_DEF(iemOp_int1)
10388{
10389 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10390 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10391 /** @todo testcase! */
10392 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10393}
10394
10395
10396/**
10397 * @opcode 0xf2
10398 */
10399FNIEMOP_DEF(iemOp_repne)
10400{
10401 /* This overrides any previous REPE prefix. */
10402 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10403 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10404 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10405
10406 /* For the 4 entry opcode tables, REPNZ overrides any previous
10407 REPZ and operand size prefixes. */
10408 pVCpu->iem.s.idxPrefix = 3;
10409
10410 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10411 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10412}
10413
10414
10415/**
10416 * @opcode 0xf3
10417 */
10418FNIEMOP_DEF(iemOp_repe)
10419{
10420 /* This overrides any previous REPNE prefix. */
10421 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10422 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10423 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10424
10425 /* For the 4 entry opcode tables, REPNZ overrides any previous
10426 REPNZ and operand size prefixes. */
10427 pVCpu->iem.s.idxPrefix = 2;
10428
10429 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10430 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10431}
10432
10433
10434/**
10435 * @opcode 0xf4
10436 */
10437FNIEMOP_DEF(iemOp_hlt)
10438{
10439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10440 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10441}
10442
10443
10444/**
10445 * @opcode 0xf5
10446 */
10447FNIEMOP_DEF(iemOp_cmc)
10448{
10449 IEMOP_MNEMONIC(cmc, "cmc");
10450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10451 IEM_MC_BEGIN(0, 0);
10452 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10453 IEM_MC_ADVANCE_RIP();
10454 IEM_MC_END();
10455 return VINF_SUCCESS;
10456}
10457
10458
10459/**
10460 * Common implementation of 'inc/dec/not/neg Eb'.
10461 *
10462 * @param bRm The RM byte.
10463 * @param pImpl The instruction implementation.
10464 */
10465FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10466{
10467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10468 {
10469 /* register access */
10470 IEM_MC_BEGIN(2, 0);
10471 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10472 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10473 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10474 IEM_MC_REF_EFLAGS(pEFlags);
10475 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10476 IEM_MC_ADVANCE_RIP();
10477 IEM_MC_END();
10478 }
10479 else
10480 {
10481 /* memory access. */
10482 IEM_MC_BEGIN(2, 2);
10483 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10484 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10486
10487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10488 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10489 IEM_MC_FETCH_EFLAGS(EFlags);
10490 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10491 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10492 else
10493 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10494
10495 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10496 IEM_MC_COMMIT_EFLAGS(EFlags);
10497 IEM_MC_ADVANCE_RIP();
10498 IEM_MC_END();
10499 }
10500 return VINF_SUCCESS;
10501}
10502
10503
10504/**
10505 * Common implementation of 'inc/dec/not/neg Ev'.
10506 *
10507 * @param bRm The RM byte.
10508 * @param pImpl The instruction implementation.
10509 */
10510FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10511{
10512 /* Registers are handled by a common worker. */
10513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10514 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10515
10516 /* Memory we do here. */
10517 switch (pVCpu->iem.s.enmEffOpSize)
10518 {
10519 case IEMMODE_16BIT:
10520 IEM_MC_BEGIN(2, 2);
10521 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10522 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10524
10525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10526 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10527 IEM_MC_FETCH_EFLAGS(EFlags);
10528 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10529 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10530 else
10531 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10532
10533 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10534 IEM_MC_COMMIT_EFLAGS(EFlags);
10535 IEM_MC_ADVANCE_RIP();
10536 IEM_MC_END();
10537 return VINF_SUCCESS;
10538
10539 case IEMMODE_32BIT:
10540 IEM_MC_BEGIN(2, 2);
10541 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10542 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10544
10545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10546 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10547 IEM_MC_FETCH_EFLAGS(EFlags);
10548 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10549 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10550 else
10551 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10552
10553 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10554 IEM_MC_COMMIT_EFLAGS(EFlags);
10555 IEM_MC_ADVANCE_RIP();
10556 IEM_MC_END();
10557 return VINF_SUCCESS;
10558
10559 case IEMMODE_64BIT:
10560 IEM_MC_BEGIN(2, 2);
10561 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10562 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10564
10565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10566 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10567 IEM_MC_FETCH_EFLAGS(EFlags);
10568 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10569 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10570 else
10571 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10572
10573 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10574 IEM_MC_COMMIT_EFLAGS(EFlags);
10575 IEM_MC_ADVANCE_RIP();
10576 IEM_MC_END();
10577 return VINF_SUCCESS;
10578
10579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10580 }
10581}
10582
10583
/** Opcode 0xf6 /0 - test Eb,Ib.
 *
 * TEST only reads the destination (note the IEM_ACCESS_DATA_R mapping) and
 * updates EFLAGS; no result is written back.  LOCK is not allowed.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The immediate follows the ModR/M bytes, so reserve 1 byte when
           calculating the effective address (disp handling). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10630
10631
/** Opcode 0xf7 /0 - test Ev,Iv.
 *
 * TEST only reads the destination (IEM_ACCESS_DATA_R mapping) and updates
 * EFLAGS; no result is written back.  The 64-bit immediate is a sign-extended
 * 32-bit value, as per the usual Iz encoding.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Reserve 2 bytes for the trailing immediate when computing the
                   effective address. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 bytes: the immediate is Iz (32-bit, sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10771
10772
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for mul/imul/div/idiv Eb.  The assembly worker (pfnU8)
 * operates on AX and returns non-zero when a divide error (\#DE) must be
 * raised.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10824
10825
10826/** Opcode 0xf7 /4, /5, /6 and /7. */
10827FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10828{
10829 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10830
10831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10832 {
10833 /* register access */
10834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10835 switch (pVCpu->iem.s.enmEffOpSize)
10836 {
10837 case IEMMODE_16BIT:
10838 {
10839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10840 IEM_MC_BEGIN(4, 1);
10841 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10842 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10843 IEM_MC_ARG(uint16_t, u16Value, 2);
10844 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10845 IEM_MC_LOCAL(int32_t, rc);
10846
10847 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10848 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10849 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10850 IEM_MC_REF_EFLAGS(pEFlags);
10851 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10852 IEM_MC_IF_LOCAL_IS_Z(rc) {
10853 IEM_MC_ADVANCE_RIP();
10854 } IEM_MC_ELSE() {
10855 IEM_MC_RAISE_DIVIDE_ERROR();
10856 } IEM_MC_ENDIF();
10857
10858 IEM_MC_END();
10859 return VINF_SUCCESS;
10860 }
10861
10862 case IEMMODE_32BIT:
10863 {
10864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10865 IEM_MC_BEGIN(4, 1);
10866 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10867 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10868 IEM_MC_ARG(uint32_t, u32Value, 2);
10869 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10870 IEM_MC_LOCAL(int32_t, rc);
10871
10872 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10873 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10874 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10875 IEM_MC_REF_EFLAGS(pEFlags);
10876 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10877 IEM_MC_IF_LOCAL_IS_Z(rc) {
10878 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10880 IEM_MC_ADVANCE_RIP();
10881 } IEM_MC_ELSE() {
10882 IEM_MC_RAISE_DIVIDE_ERROR();
10883 } IEM_MC_ENDIF();
10884
10885 IEM_MC_END();
10886 return VINF_SUCCESS;
10887 }
10888
10889 case IEMMODE_64BIT:
10890 {
10891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10892 IEM_MC_BEGIN(4, 1);
10893 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10894 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10895 IEM_MC_ARG(uint64_t, u64Value, 2);
10896 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10897 IEM_MC_LOCAL(int32_t, rc);
10898
10899 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10900 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10901 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10902 IEM_MC_REF_EFLAGS(pEFlags);
10903 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10904 IEM_MC_IF_LOCAL_IS_Z(rc) {
10905 IEM_MC_ADVANCE_RIP();
10906 } IEM_MC_ELSE() {
10907 IEM_MC_RAISE_DIVIDE_ERROR();
10908 } IEM_MC_ENDIF();
10909
10910 IEM_MC_END();
10911 return VINF_SUCCESS;
10912 }
10913
10914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10915 }
10916 }
10917 else
10918 {
10919 /* memory access. */
10920 switch (pVCpu->iem.s.enmEffOpSize)
10921 {
10922 case IEMMODE_16BIT:
10923 {
10924 IEM_MC_BEGIN(4, 2);
10925 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10926 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10927 IEM_MC_ARG(uint16_t, u16Value, 2);
10928 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10930 IEM_MC_LOCAL(int32_t, rc);
10931
10932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10934 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10935 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10936 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10937 IEM_MC_REF_EFLAGS(pEFlags);
10938 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10939 IEM_MC_IF_LOCAL_IS_Z(rc) {
10940 IEM_MC_ADVANCE_RIP();
10941 } IEM_MC_ELSE() {
10942 IEM_MC_RAISE_DIVIDE_ERROR();
10943 } IEM_MC_ENDIF();
10944
10945 IEM_MC_END();
10946 return VINF_SUCCESS;
10947 }
10948
10949 case IEMMODE_32BIT:
10950 {
10951 IEM_MC_BEGIN(4, 2);
10952 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10953 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10954 IEM_MC_ARG(uint32_t, u32Value, 2);
10955 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10957 IEM_MC_LOCAL(int32_t, rc);
10958
10959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10961 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10962 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10963 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10964 IEM_MC_REF_EFLAGS(pEFlags);
10965 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10966 IEM_MC_IF_LOCAL_IS_Z(rc) {
10967 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10968 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10969 IEM_MC_ADVANCE_RIP();
10970 } IEM_MC_ELSE() {
10971 IEM_MC_RAISE_DIVIDE_ERROR();
10972 } IEM_MC_ENDIF();
10973
10974 IEM_MC_END();
10975 return VINF_SUCCESS;
10976 }
10977
10978 case IEMMODE_64BIT:
10979 {
10980 IEM_MC_BEGIN(4, 2);
10981 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10982 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10983 IEM_MC_ARG(uint64_t, u64Value, 2);
10984 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10986 IEM_MC_LOCAL(int32_t, rc);
10987
10988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10990 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10991 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10992 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10993 IEM_MC_REF_EFLAGS(pEFlags);
10994 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10995 IEM_MC_IF_LOCAL_IS_Z(rc) {
10996 IEM_MC_ADVANCE_RIP();
10997 } IEM_MC_ELSE() {
10998 IEM_MC_RAISE_DIVIDE_ERROR();
10999 } IEM_MC_ENDIF();
11000
11001 IEM_MC_END();
11002 return VINF_SUCCESS;
11003 }
11004
11005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11006 }
11007 }
11008}
11009
11010/**
11011 * @opcode 0xf6
11012 */
11013FNIEMOP_DEF(iemOp_Grp3_Eb)
11014{
11015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11016 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11017 {
11018 case 0:
11019 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11020 case 1:
11021/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11022 return IEMOP_RAISE_INVALID_OPCODE();
11023 case 2:
11024 IEMOP_MNEMONIC(not_Eb, "not Eb");
11025 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11026 case 3:
11027 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11028 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11029 case 4:
11030 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11031 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11032 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11033 case 5:
11034 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11035 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11036 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11037 case 6:
11038 IEMOP_MNEMONIC(div_Eb, "div Eb");
11039 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11040 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11041 case 7:
11042 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11043 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11044 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11046 }
11047}
11048
11049
11050/**
11051 * @opcode 0xf7
11052 */
11053FNIEMOP_DEF(iemOp_Grp3_Ev)
11054{
11055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11056 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11057 {
11058 case 0:
11059 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11060 case 1:
11061/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11062 return IEMOP_RAISE_INVALID_OPCODE();
11063 case 2:
11064 IEMOP_MNEMONIC(not_Ev, "not Ev");
11065 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11066 case 3:
11067 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11068 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11069 case 4:
11070 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11071 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11072 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11073 case 5:
11074 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11075 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11076 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11077 case 6:
11078 IEMOP_MNEMONIC(div_Ev, "div Ev");
11079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11080 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11081 case 7:
11082 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11083 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11084 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11086 }
11087}
11088
11089
11090/**
11091 * @opcode 0xf8
11092 */
11093FNIEMOP_DEF(iemOp_clc)
11094{
11095 IEMOP_MNEMONIC(clc, "clc");
11096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11097 IEM_MC_BEGIN(0, 0);
11098 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11099 IEM_MC_ADVANCE_RIP();
11100 IEM_MC_END();
11101 return VINF_SUCCESS;
11102}
11103
11104
11105/**
11106 * @opcode 0xf9
11107 */
11108FNIEMOP_DEF(iemOp_stc)
11109{
11110 IEMOP_MNEMONIC(stc, "stc");
11111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11112 IEM_MC_BEGIN(0, 0);
11113 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11114 IEM_MC_ADVANCE_RIP();
11115 IEM_MC_END();
11116 return VINF_SUCCESS;
11117}
11118
11119
11120/**
11121 * @opcode 0xfa
11122 */
11123FNIEMOP_DEF(iemOp_cli)
11124{
11125 IEMOP_MNEMONIC(cli, "cli");
11126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11127 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11128}
11129
11130
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag; privilege checking and the interrupt shadow
 * are handled in the C implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11137
11138
11139/**
11140 * @opcode 0xfc
11141 */
11142FNIEMOP_DEF(iemOp_cld)
11143{
11144 IEMOP_MNEMONIC(cld, "cld");
11145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11146 IEM_MC_BEGIN(0, 0);
11147 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11148 IEM_MC_ADVANCE_RIP();
11149 IEM_MC_END();
11150 return VINF_SUCCESS;
11151}
11152
11153
11154/**
11155 * @opcode 0xfd
11156 */
11157FNIEMOP_DEF(iemOp_std)
11158{
11159 IEMOP_MNEMONIC(std, "std");
11160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11161 IEM_MC_BEGIN(0, 0);
11162 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11163 IEM_MC_ADVANCE_RIP();
11164 IEM_MC_END();
11165 return VINF_SUCCESS;
11166}
11167
11168
11169/**
11170 * @opcode 0xfe
11171 */
11172FNIEMOP_DEF(iemOp_Grp4)
11173{
11174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11175 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11176 {
11177 case 0:
11178 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11179 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11180 case 1:
11181 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11182 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11183 default:
11184 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11185 return IEMOP_RAISE_INVALID_OPCODE();
11186 }
11187}
11188
11189
11190/**
11191 * Opcode 0xff /2.
11192 * @param bRm The RM byte.
11193 */
11194FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11195{
11196 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11197 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11198
11199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11200 {
11201 /* The new RIP is taken from a register. */
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11203 switch (pVCpu->iem.s.enmEffOpSize)
11204 {
11205 case IEMMODE_16BIT:
11206 IEM_MC_BEGIN(1, 0);
11207 IEM_MC_ARG(uint16_t, u16Target, 0);
11208 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11209 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11210 IEM_MC_END()
11211 return VINF_SUCCESS;
11212
11213 case IEMMODE_32BIT:
11214 IEM_MC_BEGIN(1, 0);
11215 IEM_MC_ARG(uint32_t, u32Target, 0);
11216 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11217 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11218 IEM_MC_END()
11219 return VINF_SUCCESS;
11220
11221 case IEMMODE_64BIT:
11222 IEM_MC_BEGIN(1, 0);
11223 IEM_MC_ARG(uint64_t, u64Target, 0);
11224 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11225 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11226 IEM_MC_END()
11227 return VINF_SUCCESS;
11228
11229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11230 }
11231 }
11232 else
11233 {
11234 /* The new RIP is taken from a register. */
11235 switch (pVCpu->iem.s.enmEffOpSize)
11236 {
11237 case IEMMODE_16BIT:
11238 IEM_MC_BEGIN(1, 1);
11239 IEM_MC_ARG(uint16_t, u16Target, 0);
11240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11243 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11244 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11245 IEM_MC_END()
11246 return VINF_SUCCESS;
11247
11248 case IEMMODE_32BIT:
11249 IEM_MC_BEGIN(1, 1);
11250 IEM_MC_ARG(uint32_t, u32Target, 0);
11251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11254 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11255 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11256 IEM_MC_END()
11257 return VINF_SUCCESS;
11258
11259 case IEMMODE_64BIT:
11260 IEM_MC_BEGIN(1, 1);
11261 IEM_MC_ARG(uint64_t, u64Target, 0);
11262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11265 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11266 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11267 IEM_MC_END()
11268 return VINF_SUCCESS;
11269
11270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11271 }
11272 }
11273}
11274
11275typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11276
11277FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11278{
11279 /* Registers? How?? */
11280 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11281 { /* likely */ }
11282 else
11283 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11284
11285 /* Far pointer loaded from memory. */
11286 switch (pVCpu->iem.s.enmEffOpSize)
11287 {
11288 case IEMMODE_16BIT:
11289 IEM_MC_BEGIN(3, 1);
11290 IEM_MC_ARG(uint16_t, u16Sel, 0);
11291 IEM_MC_ARG(uint16_t, offSeg, 1);
11292 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11296 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11297 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11298 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11299 IEM_MC_END();
11300 return VINF_SUCCESS;
11301
11302 case IEMMODE_64BIT:
11303 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11304 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11305 * and call far qword [rsp] encodings. */
11306 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11307 {
11308 IEM_MC_BEGIN(3, 1);
11309 IEM_MC_ARG(uint16_t, u16Sel, 0);
11310 IEM_MC_ARG(uint64_t, offSeg, 1);
11311 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11315 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11316 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11317 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11318 IEM_MC_END();
11319 return VINF_SUCCESS;
11320 }
11321 /* AMD falls thru. */
11322 /* fall thru */
11323
11324 case IEMMODE_32BIT:
11325 IEM_MC_BEGIN(3, 1);
11326 IEM_MC_ARG(uint16_t, u16Sel, 0);
11327 IEM_MC_ARG(uint32_t, offSeg, 1);
11328 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11332 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11333 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11334 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11335 IEM_MC_END();
11336 return VINF_SUCCESS;
11337
11338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11339 }
11340}
11341
11342
/**
 * Opcode 0xff /3 - far indirect call (callf Ep).
 *
 * Thin wrapper: delegates the far-pointer decoding to the common
 * iemOpHlp_Grp5_far_Ep worker with iemCImpl_callf as the branch impl.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11352
11353
/**
 * Opcode 0xff /4 - near indirect jump (jmpn Ev).
 *
 * The new RIP is read from the register or memory operand and installed
 * directly via IEM_MC_SET_RIP_UNN (no return address is pushed).  Defaults
 * to 64-bit operand size in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                /* Effective address must be calculated before the done-decoding check. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11438
11439
/**
 * Opcode 0xff /5 - far indirect jump (jmpf Ep).
 *
 * Thin wrapper: delegates the far-pointer decoding to the common
 * iemOpHlp_Grp5_far_Ep worker with iemCImpl_FarJmp as the branch impl.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11449
11450
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands go through the common iemOpCommonPushGReg worker; the
 * memory-operand variants are implemented here (fetch then push).  Defaults
 * to 64-bit operand size in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11506
11507
/**
 * @opcode 0xff
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the instruction: /0 INC, /1 DEC,
       /2 CALL near, /3 CALL far, /4 JMP near, /5 JMP far, /6 PUSH;
       /7 is undefined and raises #UD. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit reg values are covered above; getting here is an internal error. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11538
11539
11540
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte
 * (0x00..0xff).  Each entry decodes and emulates one primary-map opcode,
 * or dispatches further for prefixes, groups (Grp1..Grp5, Grp11) and the
 * escape bytes (0x0f two-byte map, 0xd8..0xdf x87).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex2,  iemOp_lds_Gv_Mp__vex3,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11608
11609
11610/** @} */
11611
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette