VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66138

Last change on this file since 66138 was 66138, checked in by vboxsync, 8 years ago

IEM: Implemented AAS.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 382.8 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66138 2017-03-16 16:27:11Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_size
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
67FNIEMOP_DEF(iemOp_add_Eb_Gb)
68{
69 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
70 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
71}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
83FNIEMOP_DEF(iemOp_add_Ev_Gv)
84{
85 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
86 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
87}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
96FNIEMOP_DEF(iemOp_add_Gb_Eb)
97{
98 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
99 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
100}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
109FNIEMOP_DEF(iemOp_add_Gv_Ev)
110{
111 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
112 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
113}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
122FNIEMOP_DEF(iemOp_add_Al_Ib)
123{
124 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
125 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
126}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
138FNIEMOP_DEF(iemOp_add_eAX_Iz)
139{
140 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
141 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
142}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
149FNIEMOP_DEF(iemOp_push_ES)
150{
151 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
152 IEMOP_HLP_NO_64BIT();
153 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
154}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
161FNIEMOP_DEF(iemOp_pop_ES)
162{
163 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
164 IEMOP_HLP_NO_64BIT();
165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
167}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
181FNIEMOP_DEF(iemOp_or_Eb_Gb)
182{
183 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
185 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
186}
187
188
189/*
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
202FNIEMOP_DEF(iemOp_or_Ev_Gv)
203{
204 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
206 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
207}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
218FNIEMOP_DEF(iemOp_or_Gb_Eb)
219{
220 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
221 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
222 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
223}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
234FNIEMOP_DEF(iemOp_or_Gv_Ev)
235{
236 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
237 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
238 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
239}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
250FNIEMOP_DEF(iemOp_or_Al_Ib)
251{
252 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
254 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
255}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
272FNIEMOP_DEF(iemOp_or_eAX_Iz)
273{
274 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
276 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
277}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
284FNIEMOP_DEF(iemOp_push_CS)
285{
286 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
287 IEMOP_HLP_NO_64BIT();
288 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
289}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
345FNIEMOP_DEF(iemOp_adc_Eb_Gb)
346{
347 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
348 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
349}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
363FNIEMOP_DEF(iemOp_adc_Ev_Gv)
364{
365 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
366 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
367}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
377FNIEMOP_DEF(iemOp_adc_Gb_Eb)
378{
379 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
380 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
381}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
391FNIEMOP_DEF(iemOp_adc_Gv_Ev)
392{
393 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
394 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
395}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
405FNIEMOP_DEF(iemOp_adc_Al_Ib)
406{
407 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
408 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
409}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
419FNIEMOP_DEF(iemOp_adc_eAX_Iz)
420{
421 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
422 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
423}
424
425
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* NOTE(review): the previous doc block claimed arithmetic flag behavior
       (@opfltest cf, @opflmodify cf,pf,af,zf,sf,of) — apparently copy/pasted
       from the SBB group; nothing in this body touches EFLAGS, so those tags
       were removed. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
458FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
459{
460 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
461 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
462}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
471FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
472{
473 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
474 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
475}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
484FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
485{
486 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
487 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
488}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
497FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
498{
499 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
501}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
510FNIEMOP_DEF(iemOp_sbb_Al_Ib)
511{
512 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
513 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
514}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
523FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
524{
525 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
526 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
527}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
534FNIEMOP_DEF(iemOp_push_DS)
535{
536 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
537 IEMOP_HLP_NO_64BIT();
538 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
539}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
546FNIEMOP_DEF(iemOp_pop_DS)
547{
548 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
550 IEMOP_HLP_NO_64BIT();
551 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
552}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
562FNIEMOP_DEF(iemOp_and_Eb_Gb)
563{
564 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
565 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
566 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
567}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
577FNIEMOP_DEF(iemOp_and_Ev_Gv)
578{
579 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
582}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
592FNIEMOP_DEF(iemOp_and_Gb_Eb)
593{
594 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
596 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
597}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
607FNIEMOP_DEF(iemOp_and_Gv_Ev)
608{
609 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
610 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
611 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
612}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
637FNIEMOP_DEF(iemOp_and_eAX_Iz)
638{
639 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
640 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
641 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
642}
643
644
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the ES segment-override prefix, then continue decoding with the
       next opcode byte (prefixes are handled by tail-dispatching through the
       one-byte opcode table). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* Decimal-adjust AL after addition; invalid in 64-bit mode, actual
       semantics implemented in iemCImpl_daa. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
686FNIEMOP_DEF(iemOp_sub_Eb_Gb)
687{
688 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
689 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
690}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
698FNIEMOP_DEF(iemOp_sub_Ev_Gv)
699{
700 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
701 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
702}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
710FNIEMOP_DEF(iemOp_sub_Gb_Eb)
711{
712 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
713 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
714}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
722FNIEMOP_DEF(iemOp_sub_Gv_Ev)
723{
724 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
726}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
734FNIEMOP_DEF(iemOp_sub_Al_Ib)
735{
736 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
738}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
746FNIEMOP_DEF(iemOp_sub_eAX_Iz)
747{
748 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
749 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
750}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
762FNIEMOP_DEF(iemOp_seg_CS)
763{
764 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
765 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
766 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
767
768 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
769 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
770}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
779FNIEMOP_DEF(iemOp_das)
780{
781 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
782 IEMOP_HLP_NO_64BIT();
783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
784 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
785 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
786}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
796FNIEMOP_DEF(iemOp_xor_Eb_Gb)
797{
798 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
800 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
801}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
811FNIEMOP_DEF(iemOp_xor_Ev_Gv)
812{
813 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
814 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
815 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
816}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
826FNIEMOP_DEF(iemOp_xor_Gb_Eb)
827{
828 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
829 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
830 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
831}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
841FNIEMOP_DEF(iemOp_xor_Gv_Ev)
842{
843 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
844 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
845 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
846}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
856FNIEMOP_DEF(iemOp_xor_Al_Ib)
857{
858 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
859 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
861}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
888FNIEMOP_DEF(iemOp_seg_SS)
889{
890 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
892 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
893
894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
896}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
910 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
912 * @optest efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
913 * @optest efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
914 * @optest efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
915 * @optest efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
916 * @optest efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
917 * @optest efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
918 * @optest efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
919 * @optest efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
920 * @optest efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
921 * @optest efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
922 * @optest efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
923 */
924FNIEMOP_DEF(iemOp_aaa)
925{
926 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
927 IEMOP_HLP_NO_64BIT();
928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
929 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
930
931 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
932}
933
934
/**
 * @opcode 0x38
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* Same dispatch pattern as the other binary ops in this file; uses the
       cmp worker table (subtract without write-back). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
943
944
945/**
946 * @opcode 0x39
947 */
948FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
949{
950 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
951 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
952}
953
954
955/**
956 * @opcode 0x3a
957 */
958FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
959{
960 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
961 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
962}
963
964
965/**
966 * @opcode 0x3b
967 */
968FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
969{
970 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
971 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
972}
973
974
975/**
976 * @opcode 0x3c
977 */
978FNIEMOP_DEF(iemOp_cmp_Al_Ib)
979{
980 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
981 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
982}
983
984
985/**
986 * @opcode 0x3d
987 */
988FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
989{
990 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
991 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
992}
993
994
995/**
996 * @opcode 0x3e
997 */
998FNIEMOP_DEF(iemOp_seg_DS)
999{
1000 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1001 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1002 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1003
1004 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1005 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1006}
1007
1008
1009/**
1010 * @opcode 0x3f
1011 * @opfltest af,cf
1012 * @opflmodify cf,pf,af,zf,sf,of
1013 * @opflundef pf,zf,sf,of
1014 * @opgroup og_gen_arith_dec
1015 * @optest efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1016 * @optest efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1017 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1018 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1019 * @optest efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1020 * @optest efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1021 * @optest efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1022 * @optest efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1023 * @optest efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1024 * @optest efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1025 * @optest efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1026 * @optest efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1027 * @optest efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1028 * @optest efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1029 * @optest efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1030 * @optest efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1031 * @optest efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1032 * @optest efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1033 * @optest efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1034 */
1035FNIEMOP_DEF(iemOp_aas)
1036{
1037 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1038 IEMOP_HLP_NO_64BIT();
1039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1041
1042 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1043}
1044
1045
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the current effective operand size and invokes the matching
 * 16/32/64-bit worker from @a pImpl on the register @a iReg, by reference.
 * The 32-bit case explicitly clears the high half of the 64-bit register
 * after the operation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: all IEMMODE values are handled above; keeps compilers
       that cannot prove switch exhaustiveness quiet. */
    return VINF_SUCCESS;
}
1090
1091
1092/**
1093 * @opcode 0x40
1094 */
1095FNIEMOP_DEF(iemOp_inc_eAX)
1096{
1097 /*
1098 * This is a REX prefix in 64-bit mode.
1099 */
1100 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1101 {
1102 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1103 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1104
1105 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1106 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1107 }
1108
1109 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1110 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1111}
1112
1113
1114/**
1115 * @opcode 0x41
1116 */
1117FNIEMOP_DEF(iemOp_inc_eCX)
1118{
1119 /*
1120 * This is a REX prefix in 64-bit mode.
1121 */
1122 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1123 {
1124 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1125 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1126 pVCpu->iem.s.uRexB = 1 << 3;
1127
1128 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1129 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1130 }
1131
1132 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1133 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1134}
1135
1136
1137/**
1138 * @opcode 0x42
1139 */
1140FNIEMOP_DEF(iemOp_inc_eDX)
1141{
1142 /*
1143 * This is a REX prefix in 64-bit mode.
1144 */
1145 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1146 {
1147 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1148 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1149 pVCpu->iem.s.uRexIndex = 1 << 3;
1150
1151 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1152 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1153 }
1154
1155 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1156 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1157}
1158
1159
1160
1161/**
1162 * @opcode 0x43
1163 */
1164FNIEMOP_DEF(iemOp_inc_eBX)
1165{
1166 /*
1167 * This is a REX prefix in 64-bit mode.
1168 */
1169 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1170 {
1171 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1172 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1173 pVCpu->iem.s.uRexB = 1 << 3;
1174 pVCpu->iem.s.uRexIndex = 1 << 3;
1175
1176 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1177 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1178 }
1179
1180 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1181 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1182}
1183
1184
1185/**
1186 * @opcode 0x44
1187 */
1188FNIEMOP_DEF(iemOp_inc_eSP)
1189{
1190 /*
1191 * This is a REX prefix in 64-bit mode.
1192 */
1193 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1194 {
1195 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1196 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1197 pVCpu->iem.s.uRexReg = 1 << 3;
1198
1199 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1200 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1201 }
1202
1203 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1204 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1205}
1206
1207
1208/**
1209 * @opcode 0x45
1210 */
1211FNIEMOP_DEF(iemOp_inc_eBP)
1212{
1213 /*
1214 * This is a REX prefix in 64-bit mode.
1215 */
1216 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1217 {
1218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1219 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1220 pVCpu->iem.s.uRexReg = 1 << 3;
1221 pVCpu->iem.s.uRexB = 1 << 3;
1222
1223 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1224 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1225 }
1226
1227 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1228 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1229}
1230
1231
1232/**
1233 * @opcode 0x46
1234 */
1235FNIEMOP_DEF(iemOp_inc_eSI)
1236{
1237 /*
1238 * This is a REX prefix in 64-bit mode.
1239 */
1240 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1241 {
1242 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1243 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1244 pVCpu->iem.s.uRexReg = 1 << 3;
1245 pVCpu->iem.s.uRexIndex = 1 << 3;
1246
1247 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1248 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1249 }
1250
1251 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1252 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1253}
1254
1255
1256/**
1257 * @opcode 0x47
1258 */
1259FNIEMOP_DEF(iemOp_inc_eDI)
1260{
1261 /*
1262 * This is a REX prefix in 64-bit mode.
1263 */
1264 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1265 {
1266 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1267 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1268 pVCpu->iem.s.uRexReg = 1 << 3;
1269 pVCpu->iem.s.uRexB = 1 << 3;
1270 pVCpu->iem.s.uRexIndex = 1 << 3;
1271
1272 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1273 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1274 }
1275
1276 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1277 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1278}
1279
1280
1281/**
1282 * @opcode 0x48
1283 */
1284FNIEMOP_DEF(iemOp_dec_eAX)
1285{
1286 /*
1287 * This is a REX prefix in 64-bit mode.
1288 */
1289 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1290 {
1291 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1292 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1293 iemRecalEffOpSize(pVCpu);
1294
1295 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1296 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1297 }
1298
1299 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1300 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1301}
1302
1303
1304/**
1305 * @opcode 0x49
1306 */
1307FNIEMOP_DEF(iemOp_dec_eCX)
1308{
1309 /*
1310 * This is a REX prefix in 64-bit mode.
1311 */
1312 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1313 {
1314 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1315 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1316 pVCpu->iem.s.uRexB = 1 << 3;
1317 iemRecalEffOpSize(pVCpu);
1318
1319 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1320 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1321 }
1322
1323 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1324 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1325}
1326
1327
1328/**
1329 * @opcode 0x4a
1330 */
1331FNIEMOP_DEF(iemOp_dec_eDX)
1332{
1333 /*
1334 * This is a REX prefix in 64-bit mode.
1335 */
1336 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1337 {
1338 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1339 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1340 pVCpu->iem.s.uRexIndex = 1 << 3;
1341 iemRecalEffOpSize(pVCpu);
1342
1343 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1344 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1345 }
1346
1347 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1348 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1349}
1350
1351
1352/**
1353 * @opcode 0x4b
1354 */
1355FNIEMOP_DEF(iemOp_dec_eBX)
1356{
1357 /*
1358 * This is a REX prefix in 64-bit mode.
1359 */
1360 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1361 {
1362 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1363 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1364 pVCpu->iem.s.uRexB = 1 << 3;
1365 pVCpu->iem.s.uRexIndex = 1 << 3;
1366 iemRecalEffOpSize(pVCpu);
1367
1368 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1369 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1370 }
1371
1372 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1373 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1374}
1375
1376
1377/**
1378 * @opcode 0x4c
1379 */
1380FNIEMOP_DEF(iemOp_dec_eSP)
1381{
1382 /*
1383 * This is a REX prefix in 64-bit mode.
1384 */
1385 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1386 {
1387 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1388 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1389 pVCpu->iem.s.uRexReg = 1 << 3;
1390 iemRecalEffOpSize(pVCpu);
1391
1392 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1393 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1394 }
1395
1396 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1397 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1398}
1399
1400
1401/**
1402 * @opcode 0x4d
1403 */
1404FNIEMOP_DEF(iemOp_dec_eBP)
1405{
1406 /*
1407 * This is a REX prefix in 64-bit mode.
1408 */
1409 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1410 {
1411 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1412 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1413 pVCpu->iem.s.uRexReg = 1 << 3;
1414 pVCpu->iem.s.uRexB = 1 << 3;
1415 iemRecalEffOpSize(pVCpu);
1416
1417 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1418 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1419 }
1420
1421 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1422 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1423}
1424
1425
1426/**
1427 * @opcode 0x4e
1428 */
1429FNIEMOP_DEF(iemOp_dec_eSI)
1430{
1431 /*
1432 * This is a REX prefix in 64-bit mode.
1433 */
1434 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1435 {
1436 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1437 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1438 pVCpu->iem.s.uRexReg = 1 << 3;
1439 pVCpu->iem.s.uRexIndex = 1 << 3;
1440 iemRecalEffOpSize(pVCpu);
1441
1442 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1443 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1444 }
1445
1446 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1447 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1448}
1449
1450
1451/**
1452 * @opcode 0x4f
1453 */
1454FNIEMOP_DEF(iemOp_dec_eDI)
1455{
1456 /*
1457 * This is a REX prefix in 64-bit mode.
1458 */
1459 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1460 {
1461 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1462 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1463 pVCpu->iem.s.uRexReg = 1 << 3;
1464 pVCpu->iem.s.uRexB = 1 << 3;
1465 pVCpu->iem.s.uRexIndex = 1 << 3;
1466 iemRecalEffOpSize(pVCpu);
1467
1468 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1469 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1470 }
1471
1472 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1473 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1474}
1475
1476
1477/**
1478 * Common 'push register' helper.
1479 */
1480FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1481{
1482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1483 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1484 {
1485 iReg |= pVCpu->iem.s.uRexB;
1486 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1487 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1488 }
1489
1490 switch (pVCpu->iem.s.enmEffOpSize)
1491 {
1492 case IEMMODE_16BIT:
1493 IEM_MC_BEGIN(0, 1);
1494 IEM_MC_LOCAL(uint16_t, u16Value);
1495 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1496 IEM_MC_PUSH_U16(u16Value);
1497 IEM_MC_ADVANCE_RIP();
1498 IEM_MC_END();
1499 break;
1500
1501 case IEMMODE_32BIT:
1502 IEM_MC_BEGIN(0, 1);
1503 IEM_MC_LOCAL(uint32_t, u32Value);
1504 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1505 IEM_MC_PUSH_U32(u32Value);
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 break;
1509
1510 case IEMMODE_64BIT:
1511 IEM_MC_BEGIN(0, 1);
1512 IEM_MC_LOCAL(uint64_t, u64Value);
1513 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1514 IEM_MC_PUSH_U64(u64Value);
1515 IEM_MC_ADVANCE_RIP();
1516 IEM_MC_END();
1517 break;
1518 }
1519
1520 return VINF_SUCCESS;
1521}
1522
1523
1524/**
1525 * @opcode 0x50
1526 */
1527FNIEMOP_DEF(iemOp_push_eAX)
1528{
1529 IEMOP_MNEMONIC(push_rAX, "push rAX");
1530 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1531}
1532
1533
1534/**
1535 * @opcode 0x51
1536 */
1537FNIEMOP_DEF(iemOp_push_eCX)
1538{
1539 IEMOP_MNEMONIC(push_rCX, "push rCX");
1540 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1541}
1542
1543
1544/**
1545 * @opcode 0x52
1546 */
1547FNIEMOP_DEF(iemOp_push_eDX)
1548{
1549 IEMOP_MNEMONIC(push_rDX, "push rDX");
1550 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1551}
1552
1553
1554/**
1555 * @opcode 0x53
1556 */
1557FNIEMOP_DEF(iemOp_push_eBX)
1558{
1559 IEMOP_MNEMONIC(push_rBX, "push rBX");
1560 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1561}
1562
1563
1564/**
1565 * @opcode 0x54
1566 */
1567FNIEMOP_DEF(iemOp_push_eSP)
1568{
1569 IEMOP_MNEMONIC(push_rSP, "push rSP");
1570 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1571 {
1572 IEM_MC_BEGIN(0, 1);
1573 IEM_MC_LOCAL(uint16_t, u16Value);
1574 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1575 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1576 IEM_MC_PUSH_U16(u16Value);
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 }
1580 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1581}
1582
1583
1584/**
1585 * @opcode 0x55
1586 */
1587FNIEMOP_DEF(iemOp_push_eBP)
1588{
1589 IEMOP_MNEMONIC(push_rBP, "push rBP");
1590 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1591}
1592
1593
1594/**
1595 * @opcode 0x56
1596 */
1597FNIEMOP_DEF(iemOp_push_eSI)
1598{
1599 IEMOP_MNEMONIC(push_rSI, "push rSI");
1600 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1601}
1602
1603
1604/**
1605 * @opcode 0x57
1606 */
1607FNIEMOP_DEF(iemOp_push_eDI)
1608{
1609 IEMOP_MNEMONIC(push_rDI, "push rDI");
1610 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1611}
1612
1613
1614/**
1615 * Common 'pop register' helper.
1616 */
1617FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1618{
1619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1620 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1621 {
1622 iReg |= pVCpu->iem.s.uRexB;
1623 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1624 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1625 }
1626
1627 switch (pVCpu->iem.s.enmEffOpSize)
1628 {
1629 case IEMMODE_16BIT:
1630 IEM_MC_BEGIN(0, 1);
1631 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1632 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1633 IEM_MC_POP_U16(pu16Dst);
1634 IEM_MC_ADVANCE_RIP();
1635 IEM_MC_END();
1636 break;
1637
1638 case IEMMODE_32BIT:
1639 IEM_MC_BEGIN(0, 1);
1640 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1641 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1642 IEM_MC_POP_U32(pu32Dst);
1643 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1644 IEM_MC_ADVANCE_RIP();
1645 IEM_MC_END();
1646 break;
1647
1648 case IEMMODE_64BIT:
1649 IEM_MC_BEGIN(0, 1);
1650 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1651 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1652 IEM_MC_POP_U64(pu64Dst);
1653 IEM_MC_ADVANCE_RIP();
1654 IEM_MC_END();
1655 break;
1656 }
1657
1658 return VINF_SUCCESS;
1659}
1660
1661
1662/**
1663 * @opcode 0x58
1664 */
1665FNIEMOP_DEF(iemOp_pop_eAX)
1666{
1667 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1668 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1669}
1670
1671
1672/**
1673 * @opcode 0x59
1674 */
1675FNIEMOP_DEF(iemOp_pop_eCX)
1676{
1677 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1678 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1679}
1680
1681
1682/**
1683 * @opcode 0x5a
1684 */
1685FNIEMOP_DEF(iemOp_pop_eDX)
1686{
1687 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1688 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1689}
1690
1691
1692/**
1693 * @opcode 0x5b
1694 */
1695FNIEMOP_DEF(iemOp_pop_eBX)
1696{
1697 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1698 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1699}
1700
1701
1702/**
1703 * @opcode 0x5c
1704 */
1705FNIEMOP_DEF(iemOp_pop_eSP)
1706{
1707 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1708 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1709 {
1710 if (pVCpu->iem.s.uRexB)
1711 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1712 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1713 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1714 }
1715
1716 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1717 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1718 /** @todo add testcase for this instruction. */
1719 switch (pVCpu->iem.s.enmEffOpSize)
1720 {
1721 case IEMMODE_16BIT:
1722 IEM_MC_BEGIN(0, 1);
1723 IEM_MC_LOCAL(uint16_t, u16Dst);
1724 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
1725 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
1726 IEM_MC_ADVANCE_RIP();
1727 IEM_MC_END();
1728 break;
1729
1730 case IEMMODE_32BIT:
1731 IEM_MC_BEGIN(0, 1);
1732 IEM_MC_LOCAL(uint32_t, u32Dst);
1733 IEM_MC_POP_U32(&u32Dst);
1734 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
1735 IEM_MC_ADVANCE_RIP();
1736 IEM_MC_END();
1737 break;
1738
1739 case IEMMODE_64BIT:
1740 IEM_MC_BEGIN(0, 1);
1741 IEM_MC_LOCAL(uint64_t, u64Dst);
1742 IEM_MC_POP_U64(&u64Dst);
1743 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
1744 IEM_MC_ADVANCE_RIP();
1745 IEM_MC_END();
1746 break;
1747 }
1748
1749 return VINF_SUCCESS;
1750}
1751
1752
1753/**
1754 * @opcode 0x5d
1755 */
1756FNIEMOP_DEF(iemOp_pop_eBP)
1757{
1758 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
1759 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
1760}
1761
1762
1763/**
1764 * @opcode 0x5e
1765 */
1766FNIEMOP_DEF(iemOp_pop_eSI)
1767{
1768 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
1769 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
1770}
1771
1772
1773/**
1774 * @opcode 0x5f
1775 */
1776FNIEMOP_DEF(iemOp_pop_eDI)
1777{
1778 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
1779 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
1780}
1781
1782
1783/**
1784 * @opcode 0x60
1785 */
1786FNIEMOP_DEF(iemOp_pusha)
1787{
1788 IEMOP_MNEMONIC(pusha, "pusha");
1789 IEMOP_HLP_MIN_186();
1790 IEMOP_HLP_NO_64BIT();
1791 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1792 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1793 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1794 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1795}
1796
1797
1798/**
1799 * @opcode 0x61
1800 */
1801FNIEMOP_DEF(iemOp_popa__mvex)
1802{
1803 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1804 {
1805 IEMOP_MNEMONIC(popa, "popa");
1806 IEMOP_HLP_MIN_186();
1807 IEMOP_HLP_NO_64BIT();
1808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1809 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1810 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1812 }
1813 IEMOP_MNEMONIC(mvex, "mvex");
1814 Log(("mvex prefix is not supported!\n"));
1815 return IEMOP_RAISE_INVALID_OPCODE();
1816}
1817
1818
1819/**
1820 * @opcode 0x62
1821 * @opmnemonic bound
1822 * @op1 Gv
1823 * @op2 Ma
1824 * @opmincpu 80186
1825 * @ophints harmless invalid_64
1826 */
1827FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
1828// IEMOP_HLP_MIN_186();
1829
1830
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: invokes the iemAImpl_arpl worker on the 16-bit destination
 * (register or memory) with the source register's 16-bit value.  Requires
 * at least a 286 and is not valid in real or V8086 mode.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: the destination word is mapped read-write, the worker
           operates on a local EFLAGS copy, and both are committed after
           the call. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1880
1881
1882/**
1883 * @opcode 0x63
1884 *
1885 * @note This is a weird one. It works like a regular move instruction if
1886 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
1887 * @todo This definitely needs a testcase to verify the odd cases. */
1888FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
1889{
1890 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
1891
1892 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
1893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1894
1895 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1896 {
1897 /*
1898 * Register to register.
1899 */
1900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1901 IEM_MC_BEGIN(0, 1);
1902 IEM_MC_LOCAL(uint64_t, u64Value);
1903 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1904 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 }
1908 else
1909 {
1910 /*
1911 * We're loading a register from memory.
1912 */
1913 IEM_MC_BEGIN(0, 2);
1914 IEM_MC_LOCAL(uint64_t, u64Value);
1915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1918 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1919 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/**
1928 * @opcode 0x64
1929 * @opmnemonic segfs
1930 * @opmincpu 80386
1931 * @opgroup og_prefixes
1932 */
1933FNIEMOP_DEF(iemOp_seg_FS)
1934{
1935 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
1936 IEMOP_HLP_MIN_386();
1937
1938 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
1939 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
1940
1941 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1942 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1943}
1944
1945
1946/**
1947 * @opcode 0x65
1948 * @opmnemonic seggs
1949 * @opmincpu 80386
1950 * @opgroup og_prefixes
1951 */
1952FNIEMOP_DEF(iemOp_seg_GS)
1953{
1954 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
1955 IEMOP_HLP_MIN_386();
1956
1957 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
1958 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
1959
1960 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1961 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1962}
1963
1964
1965/**
1966 * @opcode 0x66
1967 * @opmnemonic opsize
1968 * @openc prefix
1969 * @opmincpu 80386
1970 * @ophints harmless
1971 * @opgroup og_prefixes
1972 */
1973FNIEMOP_DEF(iemOp_op_size)
1974{
1975 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
1976 IEMOP_HLP_MIN_386();
1977
1978 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
1979 iemRecalEffOpSize(pVCpu);
1980
1981 /* For the 4 entry opcode tables, the operand prefix doesn't not count
1982 when REPZ or REPNZ are present. */
1983 if (pVCpu->iem.s.idxPrefix == 0)
1984 pVCpu->iem.s.idxPrefix = 1;
1985
1986 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1987 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1988}
1989
1990
1991/**
1992 * @opcode 0x67
1993 * @opmnemonic addrsize
1994 * @openc prefix
1995 * @opmincpu 80386
1996 * @ophints harmless
1997 * @opgroup og_prefixes
1998 */
1999FNIEMOP_DEF(iemOp_addr_size)
2000{
2001 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2002 IEMOP_HLP_MIN_386();
2003
2004 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2005 switch (pVCpu->iem.s.enmDefAddrMode)
2006 {
2007 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2008 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2009 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2010 default: AssertFailed();
2011 }
2012
2013 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2014 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2015}
2016
2017
2018/**
2019 * @opcode 0x68
2020 */
2021FNIEMOP_DEF(iemOp_push_Iz)
2022{
2023 IEMOP_MNEMONIC(push_Iz, "push Iz");
2024 IEMOP_HLP_MIN_186();
2025 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2026 switch (pVCpu->iem.s.enmEffOpSize)
2027 {
2028 case IEMMODE_16BIT:
2029 {
2030 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032 IEM_MC_BEGIN(0,0);
2033 IEM_MC_PUSH_U16(u16Imm);
2034 IEM_MC_ADVANCE_RIP();
2035 IEM_MC_END();
2036 return VINF_SUCCESS;
2037 }
2038
2039 case IEMMODE_32BIT:
2040 {
2041 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2043 IEM_MC_BEGIN(0,0);
2044 IEM_MC_PUSH_U32(u32Imm);
2045 IEM_MC_ADVANCE_RIP();
2046 IEM_MC_END();
2047 return VINF_SUCCESS;
2048 }
2049
2050 case IEMMODE_64BIT:
2051 {
2052 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 IEM_MC_BEGIN(0,0);
2055 IEM_MC_PUSH_U64(u64Imm);
2056 IEM_MC_ADVANCE_RIP();
2057 IEM_MC_END();
2058 return VINF_SUCCESS;
2059 }
2060
2061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2062 }
2063}
2064
2065
2066/**
2067 * @opcode 0x69
2068 */
2069FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2070{
2071 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2072 IEMOP_HLP_MIN_186();
2073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2074 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2075
2076 switch (pVCpu->iem.s.enmEffOpSize)
2077 {
2078 case IEMMODE_16BIT:
2079 {
2080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2081 {
2082 /* register operand */
2083 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2085
2086 IEM_MC_BEGIN(3, 1);
2087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2088 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2090 IEM_MC_LOCAL(uint16_t, u16Tmp);
2091
2092 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2093 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2094 IEM_MC_REF_EFLAGS(pEFlags);
2095 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2096 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2097
2098 IEM_MC_ADVANCE_RIP();
2099 IEM_MC_END();
2100 }
2101 else
2102 {
2103 /* memory operand */
2104 IEM_MC_BEGIN(3, 2);
2105 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2106 IEM_MC_ARG(uint16_t, u16Src, 1);
2107 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2108 IEM_MC_LOCAL(uint16_t, u16Tmp);
2109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2110
2111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2112 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2113 IEM_MC_ASSIGN(u16Src, u16Imm);
2114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2115 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2116 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2117 IEM_MC_REF_EFLAGS(pEFlags);
2118 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2119 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2120
2121 IEM_MC_ADVANCE_RIP();
2122 IEM_MC_END();
2123 }
2124 return VINF_SUCCESS;
2125 }
2126
2127 case IEMMODE_32BIT:
2128 {
2129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2130 {
2131 /* register operand */
2132 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2134
2135 IEM_MC_BEGIN(3, 1);
2136 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2137 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2138 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2139 IEM_MC_LOCAL(uint32_t, u32Tmp);
2140
2141 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2142 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2143 IEM_MC_REF_EFLAGS(pEFlags);
2144 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2145 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2146
2147 IEM_MC_ADVANCE_RIP();
2148 IEM_MC_END();
2149 }
2150 else
2151 {
2152 /* memory operand */
2153 IEM_MC_BEGIN(3, 2);
2154 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2155 IEM_MC_ARG(uint32_t, u32Src, 1);
2156 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2157 IEM_MC_LOCAL(uint32_t, u32Tmp);
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2159
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2162 IEM_MC_ASSIGN(u32Src, u32Imm);
2163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2164 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2165 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2166 IEM_MC_REF_EFLAGS(pEFlags);
2167 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2168 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2169
2170 IEM_MC_ADVANCE_RIP();
2171 IEM_MC_END();
2172 }
2173 return VINF_SUCCESS;
2174 }
2175
2176 case IEMMODE_64BIT:
2177 {
2178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2179 {
2180 /* register operand */
2181 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2183
2184 IEM_MC_BEGIN(3, 1);
2185 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2186 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2187 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2188 IEM_MC_LOCAL(uint64_t, u64Tmp);
2189
2190 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2191 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2192 IEM_MC_REF_EFLAGS(pEFlags);
2193 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2194 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2195
2196 IEM_MC_ADVANCE_RIP();
2197 IEM_MC_END();
2198 }
2199 else
2200 {
2201 /* memory operand */
2202 IEM_MC_BEGIN(3, 2);
2203 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2204 IEM_MC_ARG(uint64_t, u64Src, 1);
2205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2206 IEM_MC_LOCAL(uint64_t, u64Tmp);
2207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2208
2209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2210 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2211 IEM_MC_ASSIGN(u64Src, u64Imm);
2212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2213 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2214 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2215 IEM_MC_REF_EFLAGS(pEFlags);
2216 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2217 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2218
2219 IEM_MC_ADVANCE_RIP();
2220 IEM_MC_END();
2221 }
2222 return VINF_SUCCESS;
2223 }
2224 }
2225 AssertFailedReturn(VERR_IEM_IPE_9);
2226}
2227
2228
2229/**
2230 * @opcode 0x6a
2231 */
2232FNIEMOP_DEF(iemOp_push_Ib)
2233{
2234 IEMOP_MNEMONIC(push_Ib, "push Ib");
2235 IEMOP_HLP_MIN_186();
2236 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2238 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2239
2240 IEM_MC_BEGIN(0,0);
2241 switch (pVCpu->iem.s.enmEffOpSize)
2242 {
2243 case IEMMODE_16BIT:
2244 IEM_MC_PUSH_U16(i8Imm);
2245 break;
2246 case IEMMODE_32BIT:
2247 IEM_MC_PUSH_U32(i8Imm);
2248 break;
2249 case IEMMODE_64BIT:
2250 IEM_MC_PUSH_U64(i8Imm);
2251 break;
2252 }
2253 IEM_MC_ADVANCE_RIP();
2254 IEM_MC_END();
2255 return VINF_SUCCESS;
2256}
2257
2258
/**
 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three-operand signed multiply (186+):
 * Gv = Ev * sign-extended Ib.
 *
 * SF, ZF, AF and PF are left undefined by the hardware, which the
 * verification macro below records.  Note the decode order in the memory
 * paths: effective address first, then the trailing immediate byte.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local and store to the reg-field register so
                   source (r/m) and destination (reg) can differ. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come (matches usage elsewhere in this file) */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2414
2415
/**
 * @opcode 0x6c
 * INS/INSB - input byte string from I/O port DX (186+).  Dispatches to a C
 * implementation selected by the effective address size; both F3 (REP) and
 * F2 (REPNE) prefixes select the repeating variant.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2446
2447
/**
 * @opcode 0x6d
 * INS/INSW/INSD - input word/dword string from I/O port DX (186+).
 * Dispatches on operand and address size; the 64-bit operand-size case
 * shares the 32-bit implementations (the case falls through below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* F3 and F2 both take the rep path */
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - shares the op32 workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default label of the outer switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - shares the op32 workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default label of the outer switch */
        }
    }
}
2510
2511
/**
 * @opcode 0x6e
 * OUTS/OUTSB - output byte string to I/O port DX (186+).  The effective
 * source segment (overridable via segment prefixes) is passed to the C
 * worker; both F3 and F2 prefixes select the repeating variant.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2542
2543
/**
 * @opcode 0x6f
 * OUTS/OUTSW/OUTSD - output word/dword string to I/O port DX (186+).
 * Dispatches on operand and address size; the 64-bit operand-size case
 * shares the 32-bit implementations (the case falls through below).  The
 * effective source segment is forwarded to the C worker.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* F3 and F2 both take the rep path */
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - shares the op32 workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default label of the outer switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - shares the op32 workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default label of the outer switch */
        }
    }
}
2606
2607
/**
 * @opcode 0x70
 * JO Jb - jump short (signed 8-bit displacement) if the overflow flag is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2627
2628
/**
 * @opcode 0x71
 * JNO Jb - jump short if the overflow flag is clear.  The condition is
 * tested in inverted form: the taken path is the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (OF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (OF clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2648
/**
 * @opcode 0x72
 * JC/JB/JNAE Jb - jump short if the carry flag is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2668
2669
/**
 * @opcode 0x73
 * JNC/JNB/JAE Jb - jump short if the carry flag is clear (inverted test:
 * the taken path is the ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (CF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (CF clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2689
2690
/**
 * @opcode 0x74
 * JE/JZ Jb - jump short if the zero flag is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2710
2711
/**
 * @opcode 0x75
 * JNE/JNZ Jb - jump short if the zero flag is clear (inverted test: the
 * taken path is the ELSE arm).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (ZF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (ZF clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2731
2732
/**
 * @opcode 0x76
 * JBE/JNA Jb - jump short if CF or ZF is set (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2752
2753
/**
 * @opcode 0x77
 * JA/JNBE Jb - jump short if both CF and ZF are clear (unsigned above).
 * Inverted test: the taken path is the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (CF or ZF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (both clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2773
2774
/**
 * @opcode 0x78
 * JS Jb - jump short if the sign flag is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2794
2795
/**
 * @opcode 0x79
 * JNS Jb - jump short if the sign flag is clear (inverted test: the taken
 * path is the ELSE arm).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (SF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (SF clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2815
2816
/**
 * @opcode 0x7a
 * JP/JPE Jb - jump short if the parity flag is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2836
2837
/**
 * @opcode 0x7b
 * JNP/JPO Jb - jump short if the parity flag is clear (inverted test: the
 * taken path is the ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (PF set) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (PF clear) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2857
2858
/**
 * @opcode 0x7c
 * JL/JNGE Jb - jump short if SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (SF != OF) */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2878
2879
/**
 * @opcode 0x7d
 * JNL/JGE Jb - jump short if SF == OF (signed greater-or-equal).
 * Inverted test: the taken path is the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (SF != OF) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken (SF == OF) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2899
2900
/**
 * @opcode 0x7e
 * JLE/JNG Jb - jump short if ZF is set or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2920
2921
/**
 * @opcode 0x7f
 * JG/JNLE Jb - jump short if ZF is clear and SF == OF (signed greater).
 * Inverted test: the taken path is the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* not taken (ZF set or SF != OF) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2941
2942
/**
 * @opcode 0x80
 * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib - the ModR/M reg field
 * selects the operation via the g_apIemImplGrp1 table.  CMP (the only entry
 * without a locked worker) maps to a read-only memory access.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Decode order: effective address first, then the trailing
           immediate byte, then the decoding-done check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING(); /* LOCK prefix allowed for ops with a locked worker */
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3014
3015
/**
 * @opcode 0x81
 * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz - the ModR/M reg field
 * selects the operation via g_apIemImplGrp1.  Iz means the immediate is
 * operand-sized except in 64-bit mode, where a 32-bit immediate is
 * sign-extended to 64 bits (see IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 * CMP maps to a read-only memory access; a LOCK prefix selects the
 * locked worker for the other operations.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper register half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): unlike the 16/32-bit paths the assignment comes
                   after the decoding-done helpers here; looks inconsequential
                   since it only copies a local - confirm. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3205
3206
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 * Alias of opcode 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode, hence the
 * IEMOP_HLP_NO_64BIT check before forwarding.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3217
3218
/**
 * @opcode 0x83
 *
 * Group 1 (add, or, adc, sbb, and, sub, xor, cmp) with a word/dword/qword
 * destination (Ev) and a byte immediate (Ib) that is sign-extended to the
 * effective operand size.  The worker set is selected from g_apIemImplGrp1
 * by modrm.reg.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* modrm.reg selects the operation; the mnemonic is recorded for stats/logging. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit writes thru a register reference must explicitly zero bits 63:32. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP only reads its destination; everything else reads and writes it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing 1 = one immediate byte follows the ModR/M bytes
                   (matters for RIP-relative addressing); fetch it afterwards. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                /* CMP (no locked worker) must not have a LOCK prefix. */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3403
3404
/**
 * @opcode 0x84
 *
 * test Eb,Gb - byte AND without storing the result, flags only.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST; don't let the verifier compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3414
3415
/**
 * @opcode 0x85
 *
 * test Ev,Gv - word/dword/qword AND without storing the result, flags only.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST; don't let the verifier compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3425
3426
/**
 * @opcode 0x86
 *
 * xchg Eb,Gb.  Register-register form is done with two fetches + two stores;
 * the memory form maps the memory byte and calls the xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The register half is updated through the
         * reference passed to the worker.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3476
3477
/**
 * @opcode 0x87
 *
 * xchg Ev,Gv for all three operand sizes.  Same structure as the byte
 * variant: register-register via fetch/store pairs, memory via a mapped
 * reference and the xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written thru a reference, so bits 63:32 must be
                   cleared explicitly for the 32-bit operand size. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3601
3602
/**
 * @opcode 0x88
 *
 * mov Eb,Gb - store a byte register to r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.  Note that the effective address
         * is calculated before decoding is declared done.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3644
3645
/**
 * @opcode 0x89
 *
 * mov Ev,Gv - store a word/dword/qword register to r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3737
3738
/**
 * @opcode 0x8a
 *
 * mov Gb,Eb - load a byte register from r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3778
3779
/**
 * @opcode 0x8b
 *
 * mov Gv,Ev - load a word/dword/qword register from r/m (register or memory).
 * Also reached via 0x63 (movsxd dispatch) for non-64-bit operand sizes.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3871
3872
/**
 * opcode 0x63 - mode dependent dispatcher.
 * @todo Table fixme
 *
 * Outside 64-bit mode 0x63 is ARPL Ew,Gw.  In 64-bit mode it is MOVSXD
 * Gv,Ev; when the effective operand size isn't 64-bit there is nothing to
 * sign-extend, so it degenerates to a plain mov Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
3885
3886
/**
 * @opcode 0x8c
 *
 * mov Ev,Sw - store a segment register to r/m.  Register destinations
 * respect the operand size (zero-extending); memory stores are always
 * word sized.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     * (Only ES..GS are valid segment register encodings.)
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3962
3963
3964
3965
/**
 * @opcode 0x8d
 *
 * lea Gv,M - store the effective address of the memory operand in Gv,
 * truncated to the effective operand size.  The register form is invalid.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the operand size before storing. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
4014
4015
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - load a segment register from r/m.  Loading CS is invalid;
 * the actual load (including protected-mode checks) is done by the
 * iemCImpl_load_SReg worker.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4072
4073
/** Opcode 0x8f /0 - pop Ev (bRm already fetched by the 0x8f dispatcher). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument to
       iemOpHlpCalcRmEffAddrEx is the rSP adjustment (the operand size),
       implementing the pre-increment Intel documents. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop via a temporary
       rSP so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit rSP and advance RIP only on full success. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4168
4169
/**
 * @opcode 0x8f
 *
 * Dispatcher: modrm.reg == 0 is pop Ev; /1 thru /7 form the AMD XOP prefix.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP must not be combined with 66/F3/F2/LOCK/REX prefixes. */
        if ( ( pVCpu->iem.s.fPrefixes
             & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* XOP.R/X/B live inverted in bits 7/6/5 of the second prefix byte
               (bRm here); vvvv and L/pp come from the third byte (bXop2). */
            pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4232
4233
/**
 * Common 'xchg reg,rAX' helper for opcodes 0x90..0x97.
 *
 * @param   iReg    The low three bits of the register; REX.B is OR'ed in here.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4283
4284
4285/**
4286 * @opcode 0x90
4287 */
4288FNIEMOP_DEF(iemOp_nop)
4289{
4290 /* R8/R8D and RAX/EAX can be exchanged. */
4291 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4292 {
4293 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4294 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4295 }
4296
4297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4298 IEMOP_MNEMONIC(pause, "pause");
4299 else
4300 IEMOP_MNEMONIC(nop, "nop");
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_ADVANCE_RIP();
4303 IEM_MC_END();
4304 return VINF_SUCCESS;
4305}
4306
4307
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4316
4317
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4326
4327
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4336
4337
4338/**
4339 * @opcode 0x94
4340 */
4341FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4342{
4343 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4344 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4345}
4346
4347
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4356
4357
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4366
4367
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4376
4377
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign extend AL into AX, AX into EAX, or EAX into RAX,
 * selected by effective operand size.  Implemented by testing the sign bit
 * of the source and then filling or clearing the upper half of the
 * destination.  No EFLAGS are modified.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4425
4426
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign extend AX/EAX/RAX into DX/EDX/RDX, selected by
 * effective operand size.  The destination becomes all ones or all zeros
 * depending on the sign bit of the source.  No EFLAGS are modified.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4474
4475
/**
 * @opcode 0x9a
 *
 * Far call with an immediate ptr16:16 / ptr16:32 operand.  Invalid in
 * 64-bit mode (IEMOP_HLP_NO_64BIT).  Only the operand is decoded here;
 * the actual far call semantics are handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4494
4495
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - may raise \#NM (device-not-available, per the WAIT-specific
 * CR0 checks) or a pending FPU exception (\#MF); otherwise a no-op that
 * just advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4509
4510
/**
 * @opcode 0x9c
 *
 * PUSHF/PUSHFD/PUSHFQ - operand size defaults to 64-bit in long mode;
 * the flag filtering and privilege checks live in the C implementation.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4520
4521
/**
 * @opcode 0x9d
 *
 * POPF/POPFD/POPFQ - operand size defaults to 64-bit in long mode;
 * IOPL/mode dependent flag handling is done by the C implementation.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4531
4532
/**
 * @opcode 0x9e
 *
 * SAHF - stores AH into the low EFLAGS byte (SF, ZF, AF, PF, CF); the
 * reserved bit 1 is forced set and the remaining flag bits are preserved.
 * In 64-bit mode this raises \#UD unless CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4557
4558
/**
 * @opcode 0x9f
 *
 * LAHF - loads the low EFLAGS byte into AH.  In 64-bit mode this raises
 * \#UD unless CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4577
4578
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The moffs immediate width follows the effective address size (16, 32 or
 * 64 bits) and is zero extended to 64 bits in the destination variable.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4603
4604/**
4605 * @opcode 0xa0
4606 */
4607FNIEMOP_DEF(iemOp_mov_AL_Ob)
4608{
4609 /*
4610 * Get the offset and fend of lock prefixes.
4611 */
4612 RTGCPTR GCPtrMemOff;
4613 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4614
4615 /*
4616 * Fetch AL.
4617 */
4618 IEM_MC_BEGIN(0,1);
4619 IEM_MC_LOCAL(uint8_t, u8Tmp);
4620 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4621 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 return VINF_SUCCESS;
4625}
4626
4627
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load AX/EAX/RAX from a moffs memory operand (segment
 * overridable); the destination width follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4675
4676
4677/**
4678 * @opcode 0xa2
4679 */
4680FNIEMOP_DEF(iemOp_mov_Ob_AL)
4681{
4682 /*
4683 * Get the offset and fend of lock prefixes.
4684 */
4685 RTGCPTR GCPtrMemOff;
4686 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4687
4688 /*
4689 * Store AL.
4690 */
4691 IEM_MC_BEGIN(0,1);
4692 IEM_MC_LOCAL(uint8_t, u8Tmp);
4693 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4694 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4695 IEM_MC_ADVANCE_RIP();
4696 IEM_MC_END();
4697 return VINF_SUCCESS;
4698}
4699
4700
4701/**
4702 * @opcode 0xa3
4703 */
4704FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4705{
4706 /*
4707 * Get the offset and fend of lock prefixes.
4708 */
4709 RTGCPTR GCPtrMemOff;
4710 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4711
4712 /*
4713 * Store rAX.
4714 */
4715 switch (pVCpu->iem.s.enmEffOpSize)
4716 {
4717 case IEMMODE_16BIT:
4718 IEM_MC_BEGIN(0,1);
4719 IEM_MC_LOCAL(uint16_t, u16Tmp);
4720 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4721 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4722 IEM_MC_ADVANCE_RIP();
4723 IEM_MC_END();
4724 return VINF_SUCCESS;
4725
4726 case IEMMODE_32BIT:
4727 IEM_MC_BEGIN(0,1);
4728 IEM_MC_LOCAL(uint32_t, u32Tmp);
4729 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4730 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 return VINF_SUCCESS;
4734
4735 case IEMMODE_64BIT:
4736 IEM_MC_BEGIN(0,1);
4737 IEM_MC_LOCAL(uint64_t, u64Tmp);
4738 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4739 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 return VINF_SUCCESS;
4743
4744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4745 }
4746}
4747
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv to implement one
 * non-repeating MOVS case: loads from (seg-overridable) rSI, stores to
 * ES:rDI, then increments or decrements both index registers by the
 * operand size depending on EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4766
/**
 * @opcode 0xa4
 *
 * MOVSB - byte string move.  With a REP prefix (REPNZ is treated the same
 * as REPZ here, as both bits select the rep_movs C implementation) the
 * whole repeated operation is deferred to C code; otherwise a single
 * iteration is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4802
4803
/**
 * @opcode 0xa5
 *
 * MOVSW/MOVSD/MOVSQ - word/dword/qword string move, selected by effective
 * operand size.  With a REP prefix (REPNZ handled like REPZ) the repeated
 * operation is deferred to the matching op-size/addr-size C implementation;
 * otherwise a single iteration is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: all inner cases return, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4888
4889#undef IEM_MOVS_CASE
4890
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv to implement one
 * non-repeating CMPS case: compares the operand at (seg-overridable) rSI
 * against the one at ES:rDI via iemAImpl_cmp (setting the arithmetic
 * EFLAGS), then increments or decrements both index registers by the
 * operand size depending on EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
/**
 * @opcode 0xa6
 *
 * CMPSB - byte string compare.  REPZ selects the repe C implementation,
 * REPNZ the repne one (checked in that order); without a repeat prefix a
 * single iteration is emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4965
4966
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - word/dword/qword string compare, selected by
 * effective operand size.  REPZ selects the repe C implementations, REPNZ
 * the repne ones (checked in that order); without a repeat prefix a single
 * iteration is emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: all inner cases return, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: all inner cases return, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5087
5088#undef IEM_CMPS_CASE
5089
/**
 * @opcode 0xa8
 *
 * test AL,Ib - defers to the common AL,Ib binary-operator helper with the
 * TEST implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5099
5100
/**
 * @opcode 0xa9
 *
 * test rAX,Iz - defers to the common rAX,Iz binary-operator helper with the
 * TEST implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5110
5111
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to implement one
 * non-repeating STOS case: stores AL/AX/EAX/RAX to ES:rDI, then increments
 * or decrements rDI by the operand size depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5127
/**
 * @opcode 0xaa
 *
 * STOSB - byte string store.  With a REP prefix (REPNZ treated the same as
 * REPZ here) the repeated operation is deferred to C code; otherwise a
 * single iteration is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5163
5164
/**
 * @opcode 0xab
 *
 * STOSW/STOSD/STOSQ - word/dword/qword string store, selected by effective
 * operand size.  With a REP prefix (REPNZ treated the same as REPZ) the
 * repeated operation is deferred to the matching C implementation;
 * otherwise a single iteration is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: all inner cases return, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5249
5250#undef IEM_STOS_CASE
5251
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to implement one
 * non-repeating LODS case: loads AL/AX/EAX/RAX from (seg-overridable) rSI,
 * then increments or decrements rSI by the operand size depending on
 * EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5267
/**
 * @opcode 0xac
 *
 * LODSB - byte string load.  With a REP prefix (REPNZ treated the same as
 * REPZ here) the repeated operation is deferred to C code; otherwise a
 * single iteration is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5303
5304
5305/**
5306 * @opcode 0xad
5307 */
5308FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5309{
5310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5311
5312 /*
5313 * Use the C implementation if a repeat prefix is encountered.
5314 */
5315 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5316 {
5317 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5318 switch (pVCpu->iem.s.enmEffOpSize)
5319 {
5320 case IEMMODE_16BIT:
5321 switch (pVCpu->iem.s.enmEffAddrMode)
5322 {
5323 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5324 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5325 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5327 }
5328 break;
5329 case IEMMODE_32BIT:
5330 switch (pVCpu->iem.s.enmEffAddrMode)
5331 {
5332 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5333 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5334 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5336 }
5337 case IEMMODE_64BIT:
5338 switch (pVCpu->iem.s.enmEffAddrMode)
5339 {
5340 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5341 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5342 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5344 }
5345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5346 }
5347 }
5348 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5349
5350 /*
5351 * Annoying double switch here.
5352 * Using ugly macro for implementing the cases, sharing it with lodsb.
5353 */
5354 switch (pVCpu->iem.s.enmEffOpSize)
5355 {
5356 case IEMMODE_16BIT:
5357 switch (pVCpu->iem.s.enmEffAddrMode)
5358 {
5359 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5360 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5361 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5363 }
5364 break;
5365
5366 case IEMMODE_32BIT:
5367 switch (pVCpu->iem.s.enmEffAddrMode)
5368 {
5369 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5370 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5371 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5373 }
5374 break;
5375
5376 case IEMMODE_64BIT:
5377 switch (pVCpu->iem.s.enmEffAddrMode)
5378 {
5379 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5380 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5381 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5383 }
5384 break;
5385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5386 }
5387 return VINF_SUCCESS;
5388}
5389
5390#undef IEM_LODS_CASE
5391
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for a single (non-repeated) SCAS iteration: compare
 * xAX against the ValBits-bit value at ES:[xDI] (not segment overridable,
 * X86_SREG_ES is hardcoded), update EFLAGS via the CMP worker, then advance
 * or retreat xDI by ValBits/8 bytes depending on EFLAGS.DF.  AddrBits
 * selects the effective address width used for xDI. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5413
5414/**
5415 * @opcode 0xae
5416 */
5417FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5418{
5419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5420
5421 /*
5422 * Use the C implementation if a repeat prefix is encountered.
5423 */
5424 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5425 {
5426 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5427 switch (pVCpu->iem.s.enmEffAddrMode)
5428 {
5429 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5430 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5431 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5433 }
5434 }
5435 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5436 {
5437 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5438 switch (pVCpu->iem.s.enmEffAddrMode)
5439 {
5440 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5441 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5442 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5444 }
5445 }
5446 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5447
5448 /*
5449 * Sharing case implementation with stos[wdq] below.
5450 */
5451 switch (pVCpu->iem.s.enmEffAddrMode)
5452 {
5453 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5454 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5455 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5457 }
5458 return VINF_SUCCESS;
5459}
5460
5461
5462/**
5463 * @opcode 0xaf
5464 */
5465FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5466{
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5468
5469 /*
5470 * Use the C implementation if a repeat prefix is encountered.
5471 */
5472 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5473 {
5474 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5475 switch (pVCpu->iem.s.enmEffOpSize)
5476 {
5477 case IEMMODE_16BIT:
5478 switch (pVCpu->iem.s.enmEffAddrMode)
5479 {
5480 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5481 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5482 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5484 }
5485 break;
5486 case IEMMODE_32BIT:
5487 switch (pVCpu->iem.s.enmEffAddrMode)
5488 {
5489 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5490 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5491 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5493 }
5494 case IEMMODE_64BIT:
5495 switch (pVCpu->iem.s.enmEffAddrMode)
5496 {
5497 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5498 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5499 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5501 }
5502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5503 }
5504 }
5505 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5506 {
5507 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5508 switch (pVCpu->iem.s.enmEffOpSize)
5509 {
5510 case IEMMODE_16BIT:
5511 switch (pVCpu->iem.s.enmEffAddrMode)
5512 {
5513 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5514 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5515 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5517 }
5518 break;
5519 case IEMMODE_32BIT:
5520 switch (pVCpu->iem.s.enmEffAddrMode)
5521 {
5522 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5523 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5524 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5526 }
5527 case IEMMODE_64BIT:
5528 switch (pVCpu->iem.s.enmEffAddrMode)
5529 {
5530 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 }
5538 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5539
5540 /*
5541 * Annoying double switch here.
5542 * Using ugly macro for implementing the cases, sharing it with scasb.
5543 */
5544 switch (pVCpu->iem.s.enmEffOpSize)
5545 {
5546 case IEMMODE_16BIT:
5547 switch (pVCpu->iem.s.enmEffAddrMode)
5548 {
5549 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5550 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5551 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5553 }
5554 break;
5555
5556 case IEMMODE_32BIT:
5557 switch (pVCpu->iem.s.enmEffAddrMode)
5558 {
5559 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5560 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5561 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5563 }
5564 break;
5565
5566 case IEMMODE_64BIT:
5567 switch (pVCpu->iem.s.enmEffAddrMode)
5568 {
5569 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5570 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5571 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5573 }
5574 break;
5575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5576 }
5577 return VINF_SUCCESS;
5578}
5579
5580#undef IEM_SCAS_CASE
5581
5582/**
5583 * Common 'mov r8, imm8' helper.
5584 */
5585FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5586{
5587 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5589
5590 IEM_MC_BEGIN(0, 1);
5591 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5592 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5593 IEM_MC_ADVANCE_RIP();
5594 IEM_MC_END();
5595
5596 return VINF_SUCCESS;
5597}
5598
5599
5600/**
5601 * @opcode 0xb0
5602 */
5603FNIEMOP_DEF(iemOp_mov_AL_Ib)
5604{
5605 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5606 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5607}
5608
5609
5610/**
5611 * @opcode 0xb1
5612 */
5613FNIEMOP_DEF(iemOp_CL_Ib)
5614{
5615 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5616 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5617}
5618
5619
5620/**
5621 * @opcode 0xb2
5622 */
5623FNIEMOP_DEF(iemOp_DL_Ib)
5624{
5625 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5626 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5627}
5628
5629
5630/**
5631 * @opcode 0xb3
5632 */
5633FNIEMOP_DEF(iemOp_BL_Ib)
5634{
5635 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5636 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5637}
5638
5639
5640/**
5641 * @opcode 0xb4
5642 */
5643FNIEMOP_DEF(iemOp_mov_AH_Ib)
5644{
5645 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5646 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5647}
5648
5649
5650/**
5651 * @opcode 0xb5
5652 */
5653FNIEMOP_DEF(iemOp_CH_Ib)
5654{
5655 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5656 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5657}
5658
5659
5660/**
5661 * @opcode 0xb6
5662 */
5663FNIEMOP_DEF(iemOp_DH_Ib)
5664{
5665 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5666 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5667}
5668
5669
5670/**
5671 * @opcode 0xb7
5672 */
5673FNIEMOP_DEF(iemOp_BH_Ib)
5674{
5675 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5676 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5677}
5678
5679
5680/**
5681 * Common 'mov regX,immX' helper.
5682 */
5683FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5684{
5685 switch (pVCpu->iem.s.enmEffOpSize)
5686 {
5687 case IEMMODE_16BIT:
5688 {
5689 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691
5692 IEM_MC_BEGIN(0, 1);
5693 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5694 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5695 IEM_MC_ADVANCE_RIP();
5696 IEM_MC_END();
5697 break;
5698 }
5699
5700 case IEMMODE_32BIT:
5701 {
5702 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5704
5705 IEM_MC_BEGIN(0, 1);
5706 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5707 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5708 IEM_MC_ADVANCE_RIP();
5709 IEM_MC_END();
5710 break;
5711 }
5712 case IEMMODE_64BIT:
5713 {
5714 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5716
5717 IEM_MC_BEGIN(0, 1);
5718 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5719 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5720 IEM_MC_ADVANCE_RIP();
5721 IEM_MC_END();
5722 break;
5723 }
5724 }
5725
5726 return VINF_SUCCESS;
5727}
5728
5729
5730/**
5731 * @opcode 0xb8
5732 */
5733FNIEMOP_DEF(iemOp_eAX_Iv)
5734{
5735 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5736 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5737}
5738
5739
5740/**
5741 * @opcode 0xb9
5742 */
5743FNIEMOP_DEF(iemOp_eCX_Iv)
5744{
5745 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5746 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5747}
5748
5749
5750/**
5751 * @opcode 0xba
5752 */
5753FNIEMOP_DEF(iemOp_eDX_Iv)
5754{
5755 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5756 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5757}
5758
5759
5760/**
5761 * @opcode 0xbb
5762 */
5763FNIEMOP_DEF(iemOp_eBX_Iv)
5764{
5765 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5766 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5767}
5768
5769
5770/**
5771 * @opcode 0xbc
5772 */
5773FNIEMOP_DEF(iemOp_eSP_Iv)
5774{
5775 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5776 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5777}
5778
5779
5780/**
5781 * @opcode 0xbd
5782 */
5783FNIEMOP_DEF(iemOp_eBP_Iv)
5784{
5785 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5786 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5787}
5788
5789
5790/**
5791 * @opcode 0xbe
5792 */
5793FNIEMOP_DEF(iemOp_eSI_Iv)
5794{
5795 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5796 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5797}
5798
5799
5800/**
5801 * @opcode 0xbf
5802 */
5803FNIEMOP_DEF(iemOp_eDI_Iv)
5804{
5805 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5806 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5807}
5808
5809
5810/**
5811 * @opcode 0xc0
5812 */
5813FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5814{
5815 IEMOP_HLP_MIN_186();
5816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5817 PCIEMOPSHIFTSIZES pImpl;
5818 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5819 {
5820 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5821 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5822 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5823 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5824 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5825 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5826 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5827 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5828 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5829 }
5830 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5831
5832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5833 {
5834 /* register */
5835 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5837 IEM_MC_BEGIN(3, 0);
5838 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5839 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5840 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5841 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5842 IEM_MC_REF_EFLAGS(pEFlags);
5843 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5844 IEM_MC_ADVANCE_RIP();
5845 IEM_MC_END();
5846 }
5847 else
5848 {
5849 /* memory */
5850 IEM_MC_BEGIN(3, 2);
5851 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5852 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5853 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5855
5856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5857 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5858 IEM_MC_ASSIGN(cShiftArg, cShift);
5859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5860 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5861 IEM_MC_FETCH_EFLAGS(EFlags);
5862 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5863
5864 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5865 IEM_MC_COMMIT_EFLAGS(EFlags);
5866 IEM_MC_ADVANCE_RIP();
5867 IEM_MC_END();
5868 }
5869 return VINF_SUCCESS;
5870}
5871
5872
5873/**
5874 * @opcode 0xc1
5875 */
5876FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
5877{
5878 IEMOP_HLP_MIN_186();
5879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5880 PCIEMOPSHIFTSIZES pImpl;
5881 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5882 {
5883 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
5884 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
5885 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
5886 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
5887 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
5888 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
5889 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
5890 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5891 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5892 }
5893 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5894
5895 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5896 {
5897 /* register */
5898 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5900 switch (pVCpu->iem.s.enmEffOpSize)
5901 {
5902 case IEMMODE_16BIT:
5903 IEM_MC_BEGIN(3, 0);
5904 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5905 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5906 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5907 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5908 IEM_MC_REF_EFLAGS(pEFlags);
5909 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5910 IEM_MC_ADVANCE_RIP();
5911 IEM_MC_END();
5912 return VINF_SUCCESS;
5913
5914 case IEMMODE_32BIT:
5915 IEM_MC_BEGIN(3, 0);
5916 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5917 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5918 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5919 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5920 IEM_MC_REF_EFLAGS(pEFlags);
5921 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5922 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5923 IEM_MC_ADVANCE_RIP();
5924 IEM_MC_END();
5925 return VINF_SUCCESS;
5926
5927 case IEMMODE_64BIT:
5928 IEM_MC_BEGIN(3, 0);
5929 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5930 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5931 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5932 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5933 IEM_MC_REF_EFLAGS(pEFlags);
5934 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5935 IEM_MC_ADVANCE_RIP();
5936 IEM_MC_END();
5937 return VINF_SUCCESS;
5938
5939 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5940 }
5941 }
5942 else
5943 {
5944 /* memory */
5945 switch (pVCpu->iem.s.enmEffOpSize)
5946 {
5947 case IEMMODE_16BIT:
5948 IEM_MC_BEGIN(3, 2);
5949 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5950 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5951 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5953
5954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5955 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5956 IEM_MC_ASSIGN(cShiftArg, cShift);
5957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5958 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5959 IEM_MC_FETCH_EFLAGS(EFlags);
5960 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5961
5962 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5963 IEM_MC_COMMIT_EFLAGS(EFlags);
5964 IEM_MC_ADVANCE_RIP();
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967
5968 case IEMMODE_32BIT:
5969 IEM_MC_BEGIN(3, 2);
5970 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5971 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5972 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5974
5975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5976 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5977 IEM_MC_ASSIGN(cShiftArg, cShift);
5978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5979 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5980 IEM_MC_FETCH_EFLAGS(EFlags);
5981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5982
5983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5984 IEM_MC_COMMIT_EFLAGS(EFlags);
5985 IEM_MC_ADVANCE_RIP();
5986 IEM_MC_END();
5987 return VINF_SUCCESS;
5988
5989 case IEMMODE_64BIT:
5990 IEM_MC_BEGIN(3, 2);
5991 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5992 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5993 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5995
5996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5997 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5998 IEM_MC_ASSIGN(cShiftArg, cShift);
5999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6000 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6001 IEM_MC_FETCH_EFLAGS(EFlags);
6002 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6003
6004 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6005 IEM_MC_COMMIT_EFLAGS(EFlags);
6006 IEM_MC_ADVANCE_RIP();
6007 IEM_MC_END();
6008 return VINF_SUCCESS;
6009
6010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6011 }
6012 }
6013}
6014
6015
6016/**
6017 * @opcode 0xc2
6018 */
6019FNIEMOP_DEF(iemOp_retn_Iw)
6020{
6021 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6022 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6024 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6025 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6026}
6027
6028
6029/**
6030 * @opcode 0xc3
6031 */
6032FNIEMOP_DEF(iemOp_retn)
6033{
6034 IEMOP_MNEMONIC(retn, "retn");
6035 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6037 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6038}
6039
6040
6041/**
6042 * @opcode 0xc4
6043 */
6044FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
6045{
6046 /* The LES instruction is invalid 64-bit mode. In legacy and
6047 compatability mode it is invalid with MOD=3.
6048 The use as a VEX prefix is made possible by assigning the inverted
6049 REX.R to the top MOD bit, and the top bit in the inverted register
6050 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6051 to accessing registers 0..7 in this VEX form. */
6052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6053 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6054 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6055 {
6056 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6057 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6058 {
6059 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6060 if ( ( pVCpu->iem.s.fPrefixes
6061 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6062 == 0)
6063 {
6064 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6065 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6066 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6067 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6068 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6069
6070 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6071 }
6072
6073 Log(("VEX2: Invalid prefix mix!\n"));
6074 }
6075 else
6076 Log(("VEX2: AVX support disabled!\n"));
6077
6078 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6079 return IEMOP_RAISE_INVALID_OPCODE();
6080 }
6081 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6082 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6083}
6084
6085
6086/**
6087 * @opcode 0xc5
6088 */
6089FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6090{
6091 /* The LDS instruction is invalid 64-bit mode. In legacy and
6092 compatability mode it is invalid with MOD=3.
6093 The use as a VEX prefix is made possible by assigning the inverted
6094 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6095 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6097 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6098 {
6099 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6100 {
6101 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6102 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6103 }
6104 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6105 }
6106
6107 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6108 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6109 {
6110 /** @todo Test when exctly the VEX conformance checks kick in during
6111 * instruction decoding and fetching (using \#PF). */
6112 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6113 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6114 if ( ( pVCpu->iem.s.fPrefixes
6115 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6116 == 0)
6117 {
6118 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6119 if (bVex2 & 0x80 /* VEX.W */)
6120 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6121 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6122 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6123 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6124 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6125 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6126 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6127
6128 switch (bRm & 0x1f)
6129 {
6130 case 1: /* 0x0f lead opcode byte. */
6131 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6132
6133 case 2: /* 0x0f 0x38 lead opcode bytes. */
6134 /** @todo VEX: Just use new tables and decoders. */
6135 IEMOP_BITCH_ABOUT_STUB();
6136 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6137
6138 case 3: /* 0x0f 0x3a lead opcode bytes. */
6139 /** @todo VEX: Just use new tables and decoders. */
6140 IEMOP_BITCH_ABOUT_STUB();
6141 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6142
6143 default:
6144 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6145 return IEMOP_RAISE_INVALID_OPCODE();
6146 }
6147 }
6148 else
6149 Log(("VEX3: Invalid prefix mix!\n"));
6150 }
6151 else
6152 Log(("VEX3: AVX support disabled!\n"));
6153 return IEMOP_RAISE_INVALID_OPCODE();
6154}
6155
6156
6157/**
6158 * @opcode 0xc6
6159 */
6160FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6161{
6162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6163 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6164 return IEMOP_RAISE_INVALID_OPCODE();
6165 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6166
6167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6168 {
6169 /* register access */
6170 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6172 IEM_MC_BEGIN(0, 0);
6173 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6174 IEM_MC_ADVANCE_RIP();
6175 IEM_MC_END();
6176 }
6177 else
6178 {
6179 /* memory access. */
6180 IEM_MC_BEGIN(0, 1);
6181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6183 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6185 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6186 IEM_MC_ADVANCE_RIP();
6187 IEM_MC_END();
6188 }
6189 return VINF_SUCCESS;
6190}
6191
6192
6193/**
6194 * @opcode 0xc7
6195 */
6196FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6197{
6198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6199 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6200 return IEMOP_RAISE_INVALID_OPCODE();
6201 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6202
6203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6204 {
6205 /* register access */
6206 switch (pVCpu->iem.s.enmEffOpSize)
6207 {
6208 case IEMMODE_16BIT:
6209 IEM_MC_BEGIN(0, 0);
6210 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6213 IEM_MC_ADVANCE_RIP();
6214 IEM_MC_END();
6215 return VINF_SUCCESS;
6216
6217 case IEMMODE_32BIT:
6218 IEM_MC_BEGIN(0, 0);
6219 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6221 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6222 IEM_MC_ADVANCE_RIP();
6223 IEM_MC_END();
6224 return VINF_SUCCESS;
6225
6226 case IEMMODE_64BIT:
6227 IEM_MC_BEGIN(0, 0);
6228 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6230 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6231 IEM_MC_ADVANCE_RIP();
6232 IEM_MC_END();
6233 return VINF_SUCCESS;
6234
6235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6236 }
6237 }
6238 else
6239 {
6240 /* memory access. */
6241 switch (pVCpu->iem.s.enmEffOpSize)
6242 {
6243 case IEMMODE_16BIT:
6244 IEM_MC_BEGIN(0, 1);
6245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6247 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6249 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6250 IEM_MC_ADVANCE_RIP();
6251 IEM_MC_END();
6252 return VINF_SUCCESS;
6253
6254 case IEMMODE_32BIT:
6255 IEM_MC_BEGIN(0, 1);
6256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6258 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6261 IEM_MC_ADVANCE_RIP();
6262 IEM_MC_END();
6263 return VINF_SUCCESS;
6264
6265 case IEMMODE_64BIT:
6266 IEM_MC_BEGIN(0, 1);
6267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6269 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6271 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6272 IEM_MC_ADVANCE_RIP();
6273 IEM_MC_END();
6274 return VINF_SUCCESS;
6275
6276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6277 }
6278 }
6279}
6280
6281
6282
6283
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame: Iw = frame size in bytes, Ib = nesting
 * level.  Decoding only; the real work is deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                    /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* In 64-bit mode the default operand size is 64-bit. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6297
6298
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame set up by ENTER.  Deferred to
 * iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                    /* LEAVE was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* In 64-bit mode the default operand size is 64-bit. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6310
6311
/**
 * @opcode 0xca
 *
 * RETF Iw - far return, popping an additional Iw bytes of arguments off the
 * stack.  Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to release after the far return */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6323
6324
/**
 * @opcode 0xcb
 *
 * RETF - far return without releasing any argument bytes (same CIMPL as
 * retf Iw, with a zero pop count).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6335
6336
/**
 * @opcode 0xcc
 *
 * INT3 - software breakpoint; raises \#BP (vector 3) via iemCImpl_int with
 * fIsBpInstr=true so the CIMPL can apply the INT3-specific semantics.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6345
6346
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector number from the
 * immediate byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int); /* interrupt vector */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6356
6357
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF (vector 4) if the overflow flag is set; the conditional
 * check lives in iemCImpl_int.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();   /* INTO is not available in 64-bit mode. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6373
6374
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return; all the mode-dependent heavy lifting is deferred
 * to iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6384
6385
/**
 * @opcode 0xd0
 *
 * Group 2 byte shift/rotate by 1: rol/ror/rcl/rcr/shl/shr/sar Eb,1.
 * The ModR/M reg field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);  /* the shift count is fixed at 1 */
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6443
6444
6445
/**
 * @opcode 0xd1
 *
 * Group 2 word/dword/qword shift/rotate by 1: rol/ror/rcl/rcr/shl/shr/sar
 * Ev,1.  The ModR/M reg field selects the operation (/6 is invalid); the
 * effective operand size picks the 16/32/64-bit worker.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);  /* count fixed at 1 */
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6579
6580
/**
 * @opcode 0xd2
 *
 * Group 2 byte shift/rotate by CL: rol/ror/rcl/rcr/shl/shr/sar Eb,CL.
 * The ModR/M reg field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6640
6641
/**
 * @opcode 0xd3
 *
 * Group 2 word/dword/qword shift/rotate by CL: rol/ror/rcl/rcr/shl/shr/sar
 * Ev,CL.  The ModR/M reg field selects the operation (/6 is invalid); the
 * effective operand size picks the 16/32/64-bit worker.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6781
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply; the immediate is the divisor
 * (0x0a in the canonical encoding).  An immediate of zero raises \#DE.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the divisor */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* division by zero -> #DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6795
6796
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division; the immediate is the multiplier
 * (0x0a in the canonical encoding).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the multiplier */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6808
6809
6810/**
6811 * @opcode 0xd6
6812 */
6813FNIEMOP_DEF(iemOp_salc)
6814{
6815 IEMOP_MNEMONIC(salc, "salc");
6816 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6817 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6819 IEMOP_HLP_NO_64BIT();
6820
6821 IEM_MC_BEGIN(0, 0);
6822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6823 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6824 } IEM_MC_ELSE() {
6825 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6826 } IEM_MC_ENDIF();
6827 IEM_MC_ADVANCE_RIP();
6828 IEM_MC_END();
6829 return VINF_SUCCESS;
6830}
6831
6832
/**
 * @opcode 0xd7
 *
 * XLAT - AL = [DS:(E/R)BX + zero-extended AL], segment overridable via
 * iEffSeg.  One variant per effective address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);  /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);           /* AL = table byte */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6881
6882
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * STn is selected by the r/m bits of @a bRm.  If either register is empty,
 * the stack-underflow path is taken instead of calling the worker.
 *
 * @param bRm       The ModR/M byte (register form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);     /* result goes into ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);          /* one of the operands was empty */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6913
6914
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Used by the compare instructions: only FSW is updated, no register is
 * written.  STn is selected by the r/m bits of @a bRm.
 *
 * @param bRm       The ModR/M byte (register form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);                  /* flags only, no register store */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);      /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6945
6946
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN, except the stack is popped after the
 * FSW update (fcomp and friends).
 *
 * @param bRm       The ModR/M byte (register form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);                 /* flags, then pop ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);     /* still pops on underflow */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6977
6978
/** Opcode 0xd8 11/0.
 * FADD ST(0),ST(i): ST(0) += ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6985
6986
/** Opcode 0xd8 11/1.
 * FMUL ST(0),ST(i): ST(0) *= ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6993
6994
/** Opcode 0xd8 11/2.
 * FCOM ST(0),ST(i): compare, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7001
7002
/** Opcode 0xd8 11/3.
 * FCOMP ST(0),ST(i): compare (same worker as fcom), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7009
7010
/** Opcode 0xd8 11/4.
 * FSUB ST(0),ST(i): ST(0) -= ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7017
7018
/** Opcode 0xd8 11/5.
 * FSUBR ST(0),ST(i): reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7025
7026
/** Opcode 0xd8 11/6.
 * FDIV ST(0),ST(i): ST(0) /= ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7033
7034
/** Opcode 0xd8 11/7.
 * FDIVR ST(0),ST(i): reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7041
7042
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory (address from the ModR/M
 * encoding in @a bRm); if ST(0) is empty, the underflow path is taken.
 *
 * @param bRm       The ModR/M byte (memory form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);  /* second operand from memory */

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);     /* result goes into ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);          /* ST(0) was empty */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7078
7079
/** Opcode 0xd8 !11/0.
 * FADD ST(0),m32real: ST(0) += m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7086
7087
/** Opcode 0xd8 !11/1.
 * FMUL ST(0),m32real: ST(0) *= m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7094
7095
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real: compare ST(0) with a 32-bit real from memory, updating
 * only FSW (memory-operand variant, so FDP/FDS get recorded too). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7128
7129
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real: like fcom m32r (same assembly worker), but pops
 * ST(0) after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7162
7163
/** Opcode 0xd8 !11/4.
 * FSUB ST(0),m32real: ST(0) -= m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7170
7171
/** Opcode 0xd8 !11/5.
 * FSUBR ST(0),m32real: reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7178
7179
/** Opcode 0xd8 !11/6.
 * FDIV ST(0),m32real: ST(0) /= m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7186
7187
/** Opcode 0xd8 !11/7.
 * FDIVR ST(0),m32real: reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7194
7195
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Records the FPU opcode, then dispatches on the
 * ModR/M reg field: register form (mod == 3) operates on ST(0)/ST(i),
 * memory form on ST(0) and an m32real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7235
7236
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real: fetch a 32-bit real from memory, convert it to 80-bit
 * and push it onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the physical slot the push lands in (TOP-1); it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7269
7270
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real: store ST0 to memory as a 32-bit real (no pop).  If ST0 is
 * empty and the invalid-operation exception is masked, the negative QNaN
 * indefinite is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so store faults are taken
       before the FPU state is touched. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7305
7306
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real: same as FST m32real but pops the register stack afterwards
 * (unless the store underflows with unmasked exceptions). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so store faults are taken
       before the FPU state is touched. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            /* Masked underflow: store the negative QNaN indefinite. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7341
7342
/** Opcode 0xd9 !11/4
 *
 * FLDENV: load the FPU environment (14 or 28 bytes depending on operand
 * size) from memory.  Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7360
7361
7362/** Opcode 0xd9 !11/5 */
7363FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7364{
7365 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7366 IEM_MC_BEGIN(1, 1);
7367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7368 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7371 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7372 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7373 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7374 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7375 IEM_MC_END();
7376 return VINF_SUCCESS;
7377}
7378
7379
7380/** Opcode 0xd9 !11/6 */
7381FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7382{
7383 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7384 IEM_MC_BEGIN(3, 0);
7385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7386 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7387 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7390 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7391 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7392 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7393 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7394 IEM_MC_END();
7395 return VINF_SUCCESS;
7396}
7397
7398
/** Opcode 0xd9 !11/7
 *
 * FNSTCW: store the current FPU control word to a 16-bit memory operand
 * (no-wait form; no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7416
7417
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: does nothing beyond updating the FPU opcode/instruction pointer
 * state and checking for pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7435
7436
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i): push a copy of ST(i) onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        /* Source register empty: stack underflow on the push. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7464
7465
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i): exchange the contents of ST0 and ST(i).  The underflow case
 * (either register empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both registers valid: swap them, setting C1 in the result FSW. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7496
7497
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copy ST0 to ST(i) and pop the register stack.  The i == 0
 * case is special-cased since it amounts to just popping ST0. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            /* Just pop; no copy needed when the destination is ST0 itself. */
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7544
7545
7546/**
7547 * Common worker for FPU instructions working on ST0 and replaces it with the
7548 * result, i.e. unary operators.
7549 *
7550 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7551 */
7552FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7553{
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555
7556 IEM_MC_BEGIN(2, 1);
7557 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7558 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7559 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7560
7561 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7562 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7563 IEM_MC_PREPARE_FPU_USAGE();
7564 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7565 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7566 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7567 IEM_MC_ELSE()
7568 IEM_MC_FPU_STACK_UNDERFLOW(0);
7569 IEM_MC_ENDIF();
7570 IEM_MC_ADVANCE_RIP();
7571
7572 IEM_MC_END();
7573 return VINF_SUCCESS;
7574}
7575
7576
/** Opcode 0xd9 0xe0.
 * FCHS: negate the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS: clear the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7591
7592
7593/**
7594 * Common worker for FPU instructions working on ST0 and only returns FSW.
7595 *
7596 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7597 */
7598FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7599{
7600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7601
7602 IEM_MC_BEGIN(2, 1);
7603 IEM_MC_LOCAL(uint16_t, u16Fsw);
7604 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7605 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7606
7607 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7608 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7609 IEM_MC_PREPARE_FPU_USAGE();
7610 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7611 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7612 IEM_MC_UPDATE_FSW(u16Fsw);
7613 IEM_MC_ELSE()
7614 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7615 IEM_MC_ENDIF();
7616 IEM_MC_ADVANCE_RIP();
7617
7618 IEM_MC_END();
7619 return VINF_SUCCESS;
7620}
7621
7622
/** Opcode 0xd9 0xe4.
 * FTST: compare ST0 against +0.0, setting C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5.
 * FXAM: classify the value in ST0 via the condition code bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7637
7638
7639/**
7640 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7641 *
7642 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7643 */
7644FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7645{
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7647
7648 IEM_MC_BEGIN(1, 1);
7649 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7650 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7651
7652 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7653 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7654 IEM_MC_PREPARE_FPU_USAGE();
7655 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7656 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7657 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7658 IEM_MC_ELSE()
7659 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7660 IEM_MC_ENDIF();
7661 IEM_MC_ADVANCE_RIP();
7662
7663 IEM_MC_END();
7664 return VINF_SUCCESS;
7665}
7666
7667
/** Opcode 0xd9 0xe8.
 * FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0.
 * F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7728
7729
7730/**
7731 * Common worker for FPU instructions working on STn and ST0, storing the result
7732 * in STn, and popping the stack unless IE, DE or ZE was raised.
7733 *
7734 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7735 */
7736FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7737{
7738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7739
7740 IEM_MC_BEGIN(3, 1);
7741 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7742 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7743 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7744 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7745
7746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7748
7749 IEM_MC_PREPARE_FPU_USAGE();
7750 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7751 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7752 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7753 IEM_MC_ELSE()
7754 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7755 IEM_MC_ENDIF();
7756 IEM_MC_ADVANCE_RIP();
7757
7758 IEM_MC_END();
7759 return VINF_SUCCESS;
7760}
7761
7762
7763/** Opcode 0xd9 0xf1. */
7764FNIEMOP_DEF(iemOp_fyl2x)
7765{
7766 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
7767 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
7768}
7769
7770
7771/**
7772 * Common worker for FPU instructions working on ST0 and having two outputs, one
7773 * replacing ST0 and one pushed onto the stack.
7774 *
7775 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7776 */
7777FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7778{
7779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7780
7781 IEM_MC_BEGIN(2, 1);
7782 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7783 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7784 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7785
7786 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7787 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7788 IEM_MC_PREPARE_FPU_USAGE();
7789 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7790 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7791 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7792 IEM_MC_ELSE()
7793 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7794 IEM_MC_ENDIF();
7795 IEM_MC_ADVANCE_RIP();
7796
7797 IEM_MC_END();
7798 return VINF_SUCCESS;
7799}
7800
7801
/** Opcode 0xd9 0xf2.
 * FPTAN: replace ST0 with its partial tangent and push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT: split ST0 into exponent (replaces ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST0 / ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7832
7833
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack top pointer (no register contents change). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack top pointer (no register contents change). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7880
7881
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating) of ST0 / ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT: replace ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS: replace ST0 with sin(ST0) and push cos(ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST0 to an integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE: scale ST0 by 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN: replace ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS: replace ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7944
7945
/** Used by iemOp_EscF1 to dispatch the register forms 0xd9 0xe0 thru 0xff
 *  (reg fields 4-7); indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7982
7983
7984/**
7985 * @opcode 0xd9
7986 */
7987FNIEMOP_DEF(iemOp_EscF1)
7988{
7989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7990 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
7991
7992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7993 {
7994 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7995 {
7996 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
7997 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
7998 case 2:
7999 if (bRm == 0xd0)
8000 return FNIEMOP_CALL(iemOp_fnop);
8001 return IEMOP_RAISE_INVALID_OPCODE();
8002 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8003 case 4:
8004 case 5:
8005 case 6:
8006 case 7:
8007 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8008 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8010 }
8011 }
8012 else
8013 {
8014 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8015 {
8016 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8017 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8018 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8019 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8020 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8021 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8022 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8023 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8025 }
8026 }
8027}
8028
8029
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST0 if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8140
8141
8142/**
8143 * Common worker for FPU instructions working on ST0 and STn, only affecting
8144 * flags, and popping twice when done.
8145 *
8146 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8147 */
8148FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8149{
8150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8151
8152 IEM_MC_BEGIN(3, 1);
8153 IEM_MC_LOCAL(uint16_t, u16Fsw);
8154 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8155 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8157
8158 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8159 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8160
8161 IEM_MC_PREPARE_FPU_USAGE();
8162 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8164 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8165 IEM_MC_ELSE()
8166 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8167 IEM_MC_ENDIF();
8168 IEM_MC_ADVANCE_RIP();
8169
8170 IEM_MC_END();
8171 return VINF_SUCCESS;
8172}
8173
8174
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8181
8182
8183/**
8184 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8185 * the result in ST0.
8186 *
8187 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8188 */
8189FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8190{
8191 IEM_MC_BEGIN(3, 3);
8192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8193 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8194 IEM_MC_LOCAL(int32_t, i32Val2);
8195 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8196 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8197 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8198
8199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8201
8202 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8203 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8204 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8205
8206 IEM_MC_PREPARE_FPU_USAGE();
8207 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8208 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8209 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8210 IEM_MC_ELSE()
8211 IEM_MC_FPU_STACK_UNDERFLOW(0);
8212 IEM_MC_ENDIF();
8213 IEM_MC_ADVANCE_RIP();
8214
8215 IEM_MC_END();
8216 return VINF_SUCCESS;
8217}
8218
8219
/** Opcode 0xda !11/0.
 * FIADD: add a 32-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.
 * FIMUL: multiply ST0 by a 32-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8234
8235
8236/** Opcode 0xda !11/2. */
8237FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
8238{
8239 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
8240
8241 IEM_MC_BEGIN(3, 3);
8242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8243 IEM_MC_LOCAL(uint16_t, u16Fsw);
8244 IEM_MC_LOCAL(int32_t, i32Val2);
8245 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8246 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8247 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8248
8249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8251
8252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8254 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8255
8256 IEM_MC_PREPARE_FPU_USAGE();
8257 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8258 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8259 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8260 IEM_MC_ELSE()
8261 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8262 IEM_MC_ENDIF();
8263 IEM_MC_ADVANCE_RIP();
8264
8265 IEM_MC_END();
8266 return VINF_SUCCESS;
8267}
8268
8269
/** Opcode 0xda !11/3.
 * FICOMP m32i: like FICOM m32i but pops the register stack afterwards
 * (note the *_THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only compare when ST(0) is non-empty; otherwise signal stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8302
8303
/** Opcode 0xda !11/4.
 * FISUB m32i: subtracts a 32-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* Common st0-op-m32i worker handles decoding, fetch, FPU checks and the st0 store. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8310
8311
/** Opcode 0xda !11/5.
 * FISUBR m32i: reversed subtract, ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Common st0-op-m32i worker handles decoding, fetch, FPU checks and the st0 store. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8318
8319
/** Opcode 0xda !11/6.
 * FIDIV m32i: divides ST(0) by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* Common st0-op-m32i worker handles decoding, fetch, FPU checks and the st0 store. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8326
8327
/** Opcode 0xda !11/7.
 * FIDIVR m32i: reversed divide, ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Common st0-op-m32i worker handles decoding, fetch, FPU checks and the st0 store. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8334
8335
8336/**
8337 * @opcode 0xda
8338 */
8339FNIEMOP_DEF(iemOp_EscF2)
8340{
8341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8342 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8343 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8344 {
8345 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8346 {
8347 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8348 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8349 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8350 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8351 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8352 case 5:
8353 if (bRm == 0xe9)
8354 return FNIEMOP_CALL(iemOp_fucompp);
8355 return IEMOP_RAISE_INVALID_OPCODE();
8356 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8357 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8359 }
8360 }
8361 else
8362 {
8363 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8364 {
8365 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8366 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8367 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8368 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8369 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8370 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8371 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8372 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8374 }
8375 }
8376}
8377
8378
/** Opcode 0xdb !11/0.
 * FILD m32i: converts a 32-bit signed integer memory operand to r80 and
 * pushes it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy; if it is
       in use this is a stack overflow instead of a push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8410
8411
/** Opcode 0xdb !11/1.
 * FISTTP m32i: stores ST(0) to memory as a 32-bit integer using truncation,
 * then pops the stack.  On stack underflow with IM masked, the integer
 * indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8446
8447
/** Opcode 0xdb !11/2.
 * FIST m32i: stores ST(0) to memory as a 32-bit integer (rounding per FCW),
 * without popping.  On stack underflow with IM masked, the integer
 * indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8482
8483
/** Opcode 0xdb !11/3.
 * FISTP m32i: like FIST m32i but pops the register stack afterwards
 * (note the *_THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8518
8519
/** Opcode 0xdb !11/5.
 * FLD m80r: loads an 80-bit real memory operand and pushes it onto the FPU
 * register stack (the r80->r80 helper performs the copy/classification). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy; if it is
       in use this is a stack overflow instead of a push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8551
8552
/** Opcode 0xdb !11/7.
 * FSTP m80r: stores ST(0) to memory as an 80-bit real and pops the stack.
 * On stack underflow with IM masked, negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8587
8588
/** Opcode 0xdb 11/0.
 * FCMOVNB: copies ST(i) to ST(0) when CF is clear.  Underflow is raised if
 * either register is empty; FOP/FIP are updated regardless of the move. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; only ST(i) needs dereferencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8615
8616
/** Opcode 0xdb 11/1.
 * FCMOVNE: copies ST(i) to ST(0) when ZF is clear.  Underflow is raised if
 * either register is empty; FOP/FIP are updated regardless of the move. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; only ST(i) needs dereferencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8643
8644
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copies ST(i) to ST(0) when both CF and ZF are clear.  Underflow
 * is raised if either register is empty; FOP/FIP are updated regardless. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; only ST(i) needs dereferencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8671
8672
/** Opcode 0xdb 11/3.
 * FCMOVNU: copies ST(i) to ST(0) when PF is clear (i.e. not unordered).
 * Underflow is raised if either register is empty; FOP/FIP are updated
 * regardless of the move.
 * NOTE(review): the double 'n' in the function name and mnemonic looks like
 * a typo for "fcmovnu" — verify before renaming, the dispatcher below uses
 * the same identifier. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; only ST(i) needs dereferencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8699
8700
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op on later FPUs, so only
 * the device-not-available check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8712
8713
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op on later FPUs, so only
 * the device-not-available check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8725
8726
/** Opcode 0xdb 0xe2.
 * FNCLEX: clears the FPU exception flags in FSW without checking for
 * pending exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* FSW is modified, so the FPU state must be actualized for changing. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8741
8742
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitializes the FPU; deferred to the C implementation with
 * exception checking disabled (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8750
8751
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 set-protected-mode instruction; ignored (no-op) on later
 * FPUs, so only the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8763
8764
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL reset-protected-mode instruction.  The ignore-as-no-op
 * variant is compiled out because newer CPUs raise \#UD for this encoding,
 * which is the behavior emulated here. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8780
8781
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare of ST(0) with ST(i), setting EFLAGS; no pop.
 * Deferred to the shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8788
8789
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare of ST(0) with ST(i), setting EFLAGS; no pop.
 * Deferred to the shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8796
8797
8798/**
8799 * @opcode 0xdb
8800 */
8801FNIEMOP_DEF(iemOp_EscF3)
8802{
8803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8804 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8806 {
8807 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8808 {
8809 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8810 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8811 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8812 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8813 case 4:
8814 switch (bRm)
8815 {
8816 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8817 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8818 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8819 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8820 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8821 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8822 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8823 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8825 }
8826 break;
8827 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8828 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8829 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8831 }
8832 }
8833 else
8834 {
8835 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8836 {
8837 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8838 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8839 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8840 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8841 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8842 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8843 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8844 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8846 }
8847 }
8848}
8849
8850
8851/**
8852 * Common worker for FPU instructions working on STn and ST0, and storing the
8853 * result in STn unless IE, DE or ZE was raised.
8854 *
8855 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8856 */
8857FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8858{
8859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8860
8861 IEM_MC_BEGIN(3, 1);
8862 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8863 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8864 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8865 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8866
8867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8869
8870 IEM_MC_PREPARE_FPU_USAGE();
8871 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8872 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8873 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8874 IEM_MC_ELSE()
8875 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8876 IEM_MC_ENDIF();
8877 IEM_MC_ADVANCE_RIP();
8878
8879 IEM_MC_END();
8880 return VINF_SUCCESS;
8881}
8882
8883
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8890
8891
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8898
8899
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8906
8907
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8914
8915
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8922
8923
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): result stored in ST(i) via the common stN/st0 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8930
8931
8932/**
8933 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8934 * memory operand, and storing the result in ST0.
8935 *
8936 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8937 */
8938FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8939{
8940 IEM_MC_BEGIN(3, 3);
8941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8942 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8943 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8944 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8945 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8946 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8947
8948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8950 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8951 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8952
8953 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8954 IEM_MC_PREPARE_FPU_USAGE();
8955 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8956 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8957 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8958 IEM_MC_ELSE()
8959 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8960 IEM_MC_ENDIF();
8961 IEM_MC_ADVANCE_RIP();
8962
8963 IEM_MC_END();
8964 return VINF_SUCCESS;
8965}
8966
8967
/** Opcode 0xdc !11/0.
 * FADD m64r: adds a 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8974
8975
/** Opcode 0xdc !11/1.
 * FMUL m64r: multiplies ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8982
8983
/** Opcode 0xdc !11/2.
 * FCOM m64r: compares ST(0) with a 64-bit real memory operand, updating
 * only the FSW condition codes (no register is written, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only compare when ST(0) is non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9016
9017
/** Opcode 0xdc !11/3.
 * FCOMP m64r: like FCOM m64r but pops the register stack afterwards
 * (note the *_THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only compare when ST(0) is non-empty; otherwise signal stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9050
9051
/** Opcode 0xdc !11/4.
 * FSUB m64r: subtracts a 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9058
9059
/** Opcode 0xdc !11/5.
 * FSUBR m64r: reversed subtract, ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9066
9067
/** Opcode 0xdc !11/6.
 * FDIV m64r: divides ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9074
9075
/** Opcode 0xdc !11/7.
 * FDIVR m64r: reversed divide, ST(0) = m64r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9082
9083
9084/**
9085 * @opcode 0xdc
9086 */
9087FNIEMOP_DEF(iemOp_EscF4)
9088{
9089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9090 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9092 {
9093 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9094 {
9095 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9096 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9097 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9098 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9099 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9100 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9101 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9102 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9104 }
9105 }
9106 else
9107 {
9108 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9109 {
9110 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9111 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9112 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9113 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9114 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9115 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9116 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9117 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9119 }
9120 }
9121}
9122
9123
/** Opcode 0xdd !11/0.
 * FLD m64r: converts a 64-bit real memory operand to r80 and pushes it
 * onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy; if it is
       in use this is a stack overflow instead of a push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9155
9156
/** Opcode 0xdd !11/1.  (The original comment said !11/0 - copy/paste slip;
 * DD /1 is FISTTP m64i per the x86 opcode map.)
 * FISTTP m64i: stores ST(0) to memory as a 64-bit integer using truncation,
 * then pops the stack.  On stack underflow with IM masked, the integer
 * indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9191
9192
/** Opcode 0xdd !11/2.  (The original comment said !11/0 - copy/paste slip;
 * DD /2 is FST m64r per the x86 opcode map.)
 * FST m64r: stores ST(0) to memory as a 64-bit real, without popping.
 * On stack underflow with IM masked, negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state so memory
       faults are raised ahead of any FPU updates. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        /* The commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9227
9228
9229
9230
/** Opcode 0xdd !11/3 (mem form, reg=3 per the 0xdd dispatcher below).
 * FSTP m64real: like FST m64real but pops the register stack afterwards
 * (note the *_THEN_POP FSW update and underflow helpers). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked invalid-operation stores the real indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9265
9266
/** Opcode 0xdd !11/4 (mem form, reg=4 per the 0xdd dispatcher below).
 * FRSTOR m94/108byte: restore the complete FPU state from memory; the
 * heavy lifting (format depends on the effective operand size) is deferred
 * to iemCImpl_frstor. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();   /* whole FPU state is overwritten */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9284
9285
/** Opcode 0xdd !11/6 (mem form, reg=6 per the 0xdd dispatcher below).
 * FNSAVE m94/108byte: store the complete FPU state to memory.
 * NOTE(review): only actualized FOR_READ here although FNSAVE also
 * reinitializes the FPU afterwards — presumably iemCImpl_fnsave takes
 * care of the reinit/state-change side; confirm there. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9304
/** Opcode 0xdd !11/7 (mem form, reg=7 per the 0xdd dispatcher below).
 * FNSTSW m16: store the FPU status word to a word in memory; no FPU
 * exception checks (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9329
9330
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the given register as empty in the tag word without
 * popping or touching its contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);   /* R/M bits select ST(i) */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9352
9353
/** Opcode 0xdd 11/2 (reg=2 per the 0xdd dispatcher below).
 * FST ST(i): copy ST(0) into ST(i) without popping; underflow is raised
 * when ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9378
9379
/** Opcode 0xdd 11/4 (reg=4 per the 0xdd dispatcher below).
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9386
9387
/** Opcode 0xdd 11/5 (reg=5 per the 0xdd dispatcher below).
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9394
9395
9396/**
9397 * @opcode 0xdd
9398 */
9399FNIEMOP_DEF(iemOp_EscF5)
9400{
9401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9402 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9403 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9404 {
9405 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9406 {
9407 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9408 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9409 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9410 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9411 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9412 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9413 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9414 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9416 }
9417 }
9418 else
9419 {
9420 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9421 {
9422 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9423 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9424 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9425 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9426 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9427 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9428 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9429 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9431 }
9432 }
9433}
9434
9435
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9442
9443
/** Opcode 0xde 11/1 (reg=1 per the 0xde dispatcher below).
 * FMULP ST(i),ST(0): multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9450
9451
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1) and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9458
9459
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9466
9467
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9474
9475
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9482
9483
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9490
9491
9492/**
9493 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9494 * the result in ST0.
9495 *
9496 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9497 */
9498FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9499{
9500 IEM_MC_BEGIN(3, 3);
9501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9502 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9503 IEM_MC_LOCAL(int16_t, i16Val2);
9504 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9505 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9506 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9507
9508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9510
9511 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9512 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9513 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9514
9515 IEM_MC_PREPARE_FPU_USAGE();
9516 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9517 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9518 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9519 IEM_MC_ELSE()
9520 IEM_MC_FPU_STACK_UNDERFLOW(0);
9521 IEM_MC_ENDIF();
9522 IEM_MC_ADVANCE_RIP();
9523
9524 IEM_MC_END();
9525 return VINF_SUCCESS;
9526}
9527
9528
/** Opcode 0xde !11/0.
 * FIADD m16int: ST0 += (int16). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9535
9536
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST0 *= (int16). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9543
9544
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST0 with a 16-bit signed integer, no pop.
 * Only the FSW is updated (no stored result), hence the dedicated body
 * rather than iemOpHlpFpu_st0_m16i. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9577
9578
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops ST0 afterwards
 * (same assembly worker; only the FSW-update/underflow helpers differ). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9611
9612
/** Opcode 0xde !11/4.
 * FISUB m16int: ST0 -= (int16). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9619
9620
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST0 = (int16) - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9627
9628
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST0 /= (int16). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9635
9636
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST0 = (int16) / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9643
9644
9645/**
9646 * @opcode 0xde
9647 */
9648FNIEMOP_DEF(iemOp_EscF6)
9649{
9650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9651 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9653 {
9654 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9655 {
9656 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9657 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9658 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9659 case 3: if (bRm == 0xd9)
9660 return FNIEMOP_CALL(iemOp_fcompp);
9661 return IEMOP_RAISE_INVALID_OPCODE();
9662 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9663 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9664 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9665 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9667 }
9668 }
9669 else
9670 {
9671 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9672 {
9673 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9674 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9675 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9676 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9677 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9678 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9679 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9680 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9682 }
9683 }
9684}
9685
9686
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp.
 * Frees ST(i) then increments TOP (i.e. pops without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();   /* the "pop" part that plain FFREE lacks */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9708
9709
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX; no-wait form, so no
 * pending-exception check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9726
9727
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i): unordered compare into EFLAGS, then pop.
 * NOTE(review): shares iemAImpl_fcomi_r80_by_r80 with FCOMIP (same as the
 * other fucomi forms in this file); the FUCOMI-vs-FCOMI difference in #IA
 * raising for QNaN operands is presumably handled (or accepted) in the
 * common worker — confirm in iemCImpl_fcomi_fucomi. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9734
9735
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9742
9743
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit signed integer and push it onto the
 * FPU stack.  Pushing requires ST(7) (the register below TOP) to be
 * empty, otherwise a push overflow is raised. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)   /* push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9775
9776
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) as int16 with truncation, then pop.
 * On stack underflow with FCW.IM set, the integer indefinite (INT16_MIN)
 * is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9811
9812
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) as int16 (rounded per FCW.RC), no pop.
 * On stack underflow with FCW.IM set, the integer indefinite (INT16_MIN)
 * is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9847
9848
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int but pops the register stack afterwards
 * (note the *_THEN_POP helpers). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9883
9884
/** Opcode 0xdf !11/4.
 * FBLD m80bcd (packed BCD load) — not implemented yet, stubbed. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9887
9888
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a 64-bit signed integer and push it onto the
 * FPU stack; push overflow if ST(7) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)   /* push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9920
9921
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd (packed BCD store and pop) — not implemented yet, stubbed. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9924
9925
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) as int64 (rounded per FCW.RC), then pop.
 * On stack underflow with FCW.IM set, the integer indefinite (INT64_MIN)
 * is stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9960
9961
9962/**
9963 * @opcode 0xdf
9964 */
9965FNIEMOP_DEF(iemOp_EscF7)
9966{
9967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9969 {
9970 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9971 {
9972 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9973 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9974 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9975 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9976 case 4: if (bRm == 0xe0)
9977 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9978 return IEMOP_RAISE_INVALID_OPCODE();
9979 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9980 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9981 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9982 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9983 }
9984 }
9985 else
9986 {
9987 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9988 {
9989 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9990 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9991 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9992 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9993 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9994 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9995 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9996 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9998 }
9999 }
10000}
10001
10002
10003/**
10004 * @opcode 0xe0
10005 */
10006FNIEMOP_DEF(iemOp_loopne_Jb)
10007{
10008 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10009 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10012
10013 switch (pVCpu->iem.s.enmEffAddrMode)
10014 {
10015 case IEMMODE_16BIT:
10016 IEM_MC_BEGIN(0,0);
10017 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10018 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10019 IEM_MC_REL_JMP_S8(i8Imm);
10020 } IEM_MC_ELSE() {
10021 IEM_MC_ADVANCE_RIP();
10022 } IEM_MC_ENDIF();
10023 IEM_MC_END();
10024 return VINF_SUCCESS;
10025
10026 case IEMMODE_32BIT:
10027 IEM_MC_BEGIN(0,0);
10028 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10029 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10030 IEM_MC_REL_JMP_S8(i8Imm);
10031 } IEM_MC_ELSE() {
10032 IEM_MC_ADVANCE_RIP();
10033 } IEM_MC_ENDIF();
10034 IEM_MC_END();
10035 return VINF_SUCCESS;
10036
10037 case IEMMODE_64BIT:
10038 IEM_MC_BEGIN(0,0);
10039 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10040 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10041 IEM_MC_REL_JMP_S8(i8Imm);
10042 } IEM_MC_ELSE() {
10043 IEM_MC_ADVANCE_RIP();
10044 } IEM_MC_ENDIF();
10045 IEM_MC_END();
10046 return VINF_SUCCESS;
10047
10048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10049 }
10050}
10051
10052
10053/**
10054 * @opcode 0xe1
10055 */
10056FNIEMOP_DEF(iemOp_loope_Jb)
10057{
10058 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10059 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10061 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10062
10063 switch (pVCpu->iem.s.enmEffAddrMode)
10064 {
10065 case IEMMODE_16BIT:
10066 IEM_MC_BEGIN(0,0);
10067 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10068 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10069 IEM_MC_REL_JMP_S8(i8Imm);
10070 } IEM_MC_ELSE() {
10071 IEM_MC_ADVANCE_RIP();
10072 } IEM_MC_ENDIF();
10073 IEM_MC_END();
10074 return VINF_SUCCESS;
10075
10076 case IEMMODE_32BIT:
10077 IEM_MC_BEGIN(0,0);
10078 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10079 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10080 IEM_MC_REL_JMP_S8(i8Imm);
10081 } IEM_MC_ELSE() {
10082 IEM_MC_ADVANCE_RIP();
10083 } IEM_MC_ENDIF();
10084 IEM_MC_END();
10085 return VINF_SUCCESS;
10086
10087 case IEMMODE_64BIT:
10088 IEM_MC_BEGIN(0,0);
10089 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10090 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10091 IEM_MC_REL_JMP_S8(i8Imm);
10092 } IEM_MC_ELSE() {
10093 IEM_MC_ADVANCE_RIP();
10094 } IEM_MC_ENDIF();
10095 IEM_MC_END();
10096 return VINF_SUCCESS;
10097
10098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10099 }
10100}
10101
10102
10103/**
10104 * @opcode 0xe2
10105 */
10106FNIEMOP_DEF(iemOp_loop_Jb)
10107{
10108 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10109 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10111 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10112
10113 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10114 * using the 32-bit operand size override. How can that be restarted? See
10115 * weird pseudo code in intel manual. */
10116 switch (pVCpu->iem.s.enmEffAddrMode)
10117 {
10118 case IEMMODE_16BIT:
10119 IEM_MC_BEGIN(0,0);
10120 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10121 {
10122 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10123 IEM_MC_IF_CX_IS_NZ() {
10124 IEM_MC_REL_JMP_S8(i8Imm);
10125 } IEM_MC_ELSE() {
10126 IEM_MC_ADVANCE_RIP();
10127 } IEM_MC_ENDIF();
10128 }
10129 else
10130 {
10131 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10132 IEM_MC_ADVANCE_RIP();
10133 }
10134 IEM_MC_END();
10135 return VINF_SUCCESS;
10136
10137 case IEMMODE_32BIT:
10138 IEM_MC_BEGIN(0,0);
10139 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10140 {
10141 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10142 IEM_MC_IF_ECX_IS_NZ() {
10143 IEM_MC_REL_JMP_S8(i8Imm);
10144 } IEM_MC_ELSE() {
10145 IEM_MC_ADVANCE_RIP();
10146 } IEM_MC_ENDIF();
10147 }
10148 else
10149 {
10150 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10151 IEM_MC_ADVANCE_RIP();
10152 }
10153 IEM_MC_END();
10154 return VINF_SUCCESS;
10155
10156 case IEMMODE_64BIT:
10157 IEM_MC_BEGIN(0,0);
10158 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10159 {
10160 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10161 IEM_MC_IF_RCX_IS_NZ() {
10162 IEM_MC_REL_JMP_S8(i8Imm);
10163 } IEM_MC_ELSE() {
10164 IEM_MC_ADVANCE_RIP();
10165 } IEM_MC_ENDIF();
10166 }
10167 else
10168 {
10169 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10170 IEM_MC_ADVANCE_RIP();
10171 }
10172 IEM_MC_END();
10173 return VINF_SUCCESS;
10174
10175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10176 }
10177}
10178
10179
10180/**
10181 * @opcode 0xe3
10182 */
10183FNIEMOP_DEF(iemOp_jecxz_Jb)
10184{
10185 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10186 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10188 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10189
10190 switch (pVCpu->iem.s.enmEffAddrMode)
10191 {
10192 case IEMMODE_16BIT:
10193 IEM_MC_BEGIN(0,0);
10194 IEM_MC_IF_CX_IS_NZ() {
10195 IEM_MC_ADVANCE_RIP();
10196 } IEM_MC_ELSE() {
10197 IEM_MC_REL_JMP_S8(i8Imm);
10198 } IEM_MC_ENDIF();
10199 IEM_MC_END();
10200 return VINF_SUCCESS;
10201
10202 case IEMMODE_32BIT:
10203 IEM_MC_BEGIN(0,0);
10204 IEM_MC_IF_ECX_IS_NZ() {
10205 IEM_MC_ADVANCE_RIP();
10206 } IEM_MC_ELSE() {
10207 IEM_MC_REL_JMP_S8(i8Imm);
10208 } IEM_MC_ENDIF();
10209 IEM_MC_END();
10210 return VINF_SUCCESS;
10211
10212 case IEMMODE_64BIT:
10213 IEM_MC_BEGIN(0,0);
10214 IEM_MC_IF_RCX_IS_NZ() {
10215 IEM_MC_ADVANCE_RIP();
10216 } IEM_MC_ELSE() {
10217 IEM_MC_REL_JMP_S8(i8Imm);
10218 } IEM_MC_ENDIF();
10219 IEM_MC_END();
10220 return VINF_SUCCESS;
10221
10222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10223 }
10224}
10225
10226
/** Opcode 0xe4 - read one byte from the immediate 8-bit I/O port into AL.
 *  The actual port access is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10235
10236
/** Opcode 0xe5 - read a word/dword from the immediate 8-bit I/O port into
 *  AX/EAX; the access width (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10245
10246
/** Opcode 0xe6 - write AL to the immediate 8-bit I/O port.
 *  The actual port access is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10255
10256
/** Opcode 0xe7 - write AX/EAX to the immediate 8-bit I/O port; the access
 *  width (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10265
10266
10267/**
10268 * @opcode 0xe8
10269 */
10270FNIEMOP_DEF(iemOp_call_Jv)
10271{
10272 IEMOP_MNEMONIC(call_Jv, "call Jv");
10273 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10274 switch (pVCpu->iem.s.enmEffOpSize)
10275 {
10276 case IEMMODE_16BIT:
10277 {
10278 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10279 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10280 }
10281
10282 case IEMMODE_32BIT:
10283 {
10284 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10285 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10286 }
10287
10288 case IEMMODE_64BIT:
10289 {
10290 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10291 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10292 }
10293
10294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10295 }
10296}
10297
10298
10299/**
10300 * @opcode 0xe9
10301 */
10302FNIEMOP_DEF(iemOp_jmp_Jv)
10303{
10304 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10305 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10306 switch (pVCpu->iem.s.enmEffOpSize)
10307 {
10308 case IEMMODE_16BIT:
10309 {
10310 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10311 IEM_MC_BEGIN(0, 0);
10312 IEM_MC_REL_JMP_S16(i16Imm);
10313 IEM_MC_END();
10314 return VINF_SUCCESS;
10315 }
10316
10317 case IEMMODE_64BIT:
10318 case IEMMODE_32BIT:
10319 {
10320 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10321 IEM_MC_BEGIN(0, 0);
10322 IEM_MC_REL_JMP_S32(i32Imm);
10323 IEM_MC_END();
10324 return VINF_SUCCESS;
10325 }
10326
10327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10328 }
10329}
10330
10331
10332/**
10333 * @opcode 0xea
10334 */
10335FNIEMOP_DEF(iemOp_jmp_Ap)
10336{
10337 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10338 IEMOP_HLP_NO_64BIT();
10339
10340 /* Decode the far pointer address and pass it on to the far call C implementation. */
10341 uint32_t offSeg;
10342 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10343 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10344 else
10345 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10346 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10348 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10349}
10350
10351
10352/**
10353 * @opcode 0xeb
10354 */
10355FNIEMOP_DEF(iemOp_jmp_Jb)
10356{
10357 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10358 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10360 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10361
10362 IEM_MC_BEGIN(0, 0);
10363 IEM_MC_REL_JMP_S8(i8Imm);
10364 IEM_MC_END();
10365 return VINF_SUCCESS;
10366}
10367
10368
/** Opcode 0xec - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10376
10377
/** Opcode 0xed - read a word/dword from the I/O port in DX into AX/EAX; the
 *  access width follows the effective operand size.
 *  NOTE(review): the function name lacks the 'in_' part used by its siblings
 *  (iemOp_in_AL_DX); renaming would require touching the opcode table, so it
 *  is merely noted here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10385
10386
/** Opcode 0xee - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10394
10395
/** Opcode 0xef - write AX/EAX to the I/O port in DX; the access width
 *  follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10403
10404
10405/**
10406 * @opcode 0xf0
10407 */
10408FNIEMOP_DEF(iemOp_lock)
10409{
10410 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10411 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10412
10413 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10414 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10415}
10416
10417
10418/**
10419 * @opcode 0xf1
10420 */
10421FNIEMOP_DEF(iemOp_int1)
10422{
10423 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10424 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10425 /** @todo testcase! */
10426 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10427}
10428
10429
10430/**
10431 * @opcode 0xf2
10432 */
10433FNIEMOP_DEF(iemOp_repne)
10434{
10435 /* This overrides any previous REPE prefix. */
10436 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10437 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10438 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10439
10440 /* For the 4 entry opcode tables, REPNZ overrides any previous
10441 REPZ and operand size prefixes. */
10442 pVCpu->iem.s.idxPrefix = 3;
10443
10444 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10445 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10446}
10447
10448
10449/**
10450 * @opcode 0xf3
10451 */
10452FNIEMOP_DEF(iemOp_repe)
10453{
10454 /* This overrides any previous REPNE prefix. */
10455 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10456 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10457 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10458
10459 /* For the 4 entry opcode tables, REPNZ overrides any previous
10460 REPNZ and operand size prefixes. */
10461 pVCpu->iem.s.idxPrefix = 2;
10462
10463 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10464 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10465}
10466
10467
10468/**
10469 * @opcode 0xf4
10470 */
10471FNIEMOP_DEF(iemOp_hlt)
10472{
10473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10474 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10475}
10476
10477
10478/**
10479 * @opcode 0xf5
10480 */
10481FNIEMOP_DEF(iemOp_cmc)
10482{
10483 IEMOP_MNEMONIC(cmc, "cmc");
10484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10485 IEM_MC_BEGIN(0, 0);
10486 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10487 IEM_MC_ADVANCE_RIP();
10488 IEM_MC_END();
10489 return VINF_SUCCESS;
10490}
10491
10492
10493/**
10494 * Common implementation of 'inc/dec/not/neg Eb'.
10495 *
10496 * @param bRm The RM byte.
10497 * @param pImpl The instruction implementation.
10498 */
10499FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10500{
10501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10502 {
10503 /* register access */
10504 IEM_MC_BEGIN(2, 0);
10505 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10506 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10507 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10508 IEM_MC_REF_EFLAGS(pEFlags);
10509 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10510 IEM_MC_ADVANCE_RIP();
10511 IEM_MC_END();
10512 }
10513 else
10514 {
10515 /* memory access. */
10516 IEM_MC_BEGIN(2, 2);
10517 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10518 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10520
10521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10522 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10523 IEM_MC_FETCH_EFLAGS(EFlags);
10524 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10525 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10526 else
10527 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10528
10529 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10530 IEM_MC_COMMIT_EFLAGS(EFlags);
10531 IEM_MC_ADVANCE_RIP();
10532 IEM_MC_END();
10533 }
10534 return VINF_SUCCESS;
10535}
10536
10537
10538/**
10539 * Common implementation of 'inc/dec/not/neg Ev'.
10540 *
10541 * @param bRm The RM byte.
10542 * @param pImpl The instruction implementation.
10543 */
10544FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10545{
10546 /* Registers are handled by a common worker. */
10547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10548 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10549
10550 /* Memory we do here. */
10551 switch (pVCpu->iem.s.enmEffOpSize)
10552 {
10553 case IEMMODE_16BIT:
10554 IEM_MC_BEGIN(2, 2);
10555 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10556 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10558
10559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10560 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10561 IEM_MC_FETCH_EFLAGS(EFlags);
10562 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10563 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10564 else
10565 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10566
10567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10568 IEM_MC_COMMIT_EFLAGS(EFlags);
10569 IEM_MC_ADVANCE_RIP();
10570 IEM_MC_END();
10571 return VINF_SUCCESS;
10572
10573 case IEMMODE_32BIT:
10574 IEM_MC_BEGIN(2, 2);
10575 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10576 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10578
10579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10580 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10581 IEM_MC_FETCH_EFLAGS(EFlags);
10582 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10583 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10584 else
10585 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10586
10587 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10588 IEM_MC_COMMIT_EFLAGS(EFlags);
10589 IEM_MC_ADVANCE_RIP();
10590 IEM_MC_END();
10591 return VINF_SUCCESS;
10592
10593 case IEMMODE_64BIT:
10594 IEM_MC_BEGIN(2, 2);
10595 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10596 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10598
10599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10600 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10601 IEM_MC_FETCH_EFLAGS(EFlags);
10602 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10603 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10604 else
10605 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10606
10607 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10608 IEM_MC_COMMIT_EFLAGS(EFlags);
10609 IEM_MC_ADVANCE_RIP();
10610 IEM_MC_END();
10611 return VINF_SUCCESS;
10612
10613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10614 }
10615}
10616
10617
/** Opcode 0xf6 /0 - 'test Eb,Ib'.
 *  Non-destructive: the memory operand is only read, so it is mapped R and
 *  no locked variant exists.  Note the immediate is fetched after the
 *  effective address calculation on the memory path (decoding order). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

        /* One immediate byte still to come, hence the trailing '1' here. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10664
10665
/** Opcode 0xf7 /0 - 'test Ev,Iv'.
 *  Non-destructive: operands are only read; memory is mapped R.  The
 *  64-bit immediate is a sign-extended 32-bit value.  On the memory paths
 *  the immediate size is passed to the effective address calculation so
 *  RIP-relative addressing accounts for the bytes still to be fetched. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Two immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Four immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Still only four immediate bytes in 64-bit mode (sign extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10805
10806
/** Opcode 0xf6 /4, /5, /6 and /7 - byte mul/imul/div/idiv common worker.
 *  The byte forms operate on AX implicitly.  The assembly worker returns
 *  non-zero to signal a divide error, in which case \#DE is raised. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10858
10859
10860/** Opcode 0xf7 /4, /5, /6 and /7. */
10861FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10862{
10863 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10864
10865 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10866 {
10867 /* register access */
10868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10869 switch (pVCpu->iem.s.enmEffOpSize)
10870 {
10871 case IEMMODE_16BIT:
10872 {
10873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10874 IEM_MC_BEGIN(4, 1);
10875 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10876 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10877 IEM_MC_ARG(uint16_t, u16Value, 2);
10878 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10879 IEM_MC_LOCAL(int32_t, rc);
10880
10881 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10882 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10883 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10884 IEM_MC_REF_EFLAGS(pEFlags);
10885 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10886 IEM_MC_IF_LOCAL_IS_Z(rc) {
10887 IEM_MC_ADVANCE_RIP();
10888 } IEM_MC_ELSE() {
10889 IEM_MC_RAISE_DIVIDE_ERROR();
10890 } IEM_MC_ENDIF();
10891
10892 IEM_MC_END();
10893 return VINF_SUCCESS;
10894 }
10895
10896 case IEMMODE_32BIT:
10897 {
10898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10899 IEM_MC_BEGIN(4, 1);
10900 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10901 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10902 IEM_MC_ARG(uint32_t, u32Value, 2);
10903 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10904 IEM_MC_LOCAL(int32_t, rc);
10905
10906 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10907 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10908 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10909 IEM_MC_REF_EFLAGS(pEFlags);
10910 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10911 IEM_MC_IF_LOCAL_IS_Z(rc) {
10912 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10913 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10914 IEM_MC_ADVANCE_RIP();
10915 } IEM_MC_ELSE() {
10916 IEM_MC_RAISE_DIVIDE_ERROR();
10917 } IEM_MC_ENDIF();
10918
10919 IEM_MC_END();
10920 return VINF_SUCCESS;
10921 }
10922
10923 case IEMMODE_64BIT:
10924 {
10925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10926 IEM_MC_BEGIN(4, 1);
10927 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10928 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10929 IEM_MC_ARG(uint64_t, u64Value, 2);
10930 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10931 IEM_MC_LOCAL(int32_t, rc);
10932
10933 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10934 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10935 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10936 IEM_MC_REF_EFLAGS(pEFlags);
10937 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10938 IEM_MC_IF_LOCAL_IS_Z(rc) {
10939 IEM_MC_ADVANCE_RIP();
10940 } IEM_MC_ELSE() {
10941 IEM_MC_RAISE_DIVIDE_ERROR();
10942 } IEM_MC_ENDIF();
10943
10944 IEM_MC_END();
10945 return VINF_SUCCESS;
10946 }
10947
10948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10949 }
10950 }
10951 else
10952 {
10953 /* memory access. */
10954 switch (pVCpu->iem.s.enmEffOpSize)
10955 {
10956 case IEMMODE_16BIT:
10957 {
10958 IEM_MC_BEGIN(4, 2);
10959 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10960 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10961 IEM_MC_ARG(uint16_t, u16Value, 2);
10962 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10964 IEM_MC_LOCAL(int32_t, rc);
10965
10966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10968 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10969 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10970 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10971 IEM_MC_REF_EFLAGS(pEFlags);
10972 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10973 IEM_MC_IF_LOCAL_IS_Z(rc) {
10974 IEM_MC_ADVANCE_RIP();
10975 } IEM_MC_ELSE() {
10976 IEM_MC_RAISE_DIVIDE_ERROR();
10977 } IEM_MC_ENDIF();
10978
10979 IEM_MC_END();
10980 return VINF_SUCCESS;
10981 }
10982
10983 case IEMMODE_32BIT:
10984 {
10985 IEM_MC_BEGIN(4, 2);
10986 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10987 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10988 IEM_MC_ARG(uint32_t, u32Value, 2);
10989 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10991 IEM_MC_LOCAL(int32_t, rc);
10992
10993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10995 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10996 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10997 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10998 IEM_MC_REF_EFLAGS(pEFlags);
10999 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11000 IEM_MC_IF_LOCAL_IS_Z(rc) {
11001 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11002 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11003 IEM_MC_ADVANCE_RIP();
11004 } IEM_MC_ELSE() {
11005 IEM_MC_RAISE_DIVIDE_ERROR();
11006 } IEM_MC_ENDIF();
11007
11008 IEM_MC_END();
11009 return VINF_SUCCESS;
11010 }
11011
11012 case IEMMODE_64BIT:
11013 {
11014 IEM_MC_BEGIN(4, 2);
11015 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11016 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11017 IEM_MC_ARG(uint64_t, u64Value, 2);
11018 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11020 IEM_MC_LOCAL(int32_t, rc);
11021
11022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11024 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11025 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11026 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11027 IEM_MC_REF_EFLAGS(pEFlags);
11028 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11029 IEM_MC_IF_LOCAL_IS_Z(rc) {
11030 IEM_MC_ADVANCE_RIP();
11031 } IEM_MC_ELSE() {
11032 IEM_MC_RAISE_DIVIDE_ERROR();
11033 } IEM_MC_ENDIF();
11034
11035 IEM_MC_END();
11036 return VINF_SUCCESS;
11037 }
11038
11039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11040 }
11041 }
11042}
11043
11044/**
11045 * @opcode 0xf6
11046 */
11047FNIEMOP_DEF(iemOp_Grp3_Eb)
11048{
11049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11050 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11051 {
11052 case 0:
11053 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11054 case 1:
11055/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11056 return IEMOP_RAISE_INVALID_OPCODE();
11057 case 2:
11058 IEMOP_MNEMONIC(not_Eb, "not Eb");
11059 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11060 case 3:
11061 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11062 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11063 case 4:
11064 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11065 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11066 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11067 case 5:
11068 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11070 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11071 case 6:
11072 IEMOP_MNEMONIC(div_Eb, "div Eb");
11073 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11074 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11075 case 7:
11076 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11077 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11078 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11080 }
11081}
11082
11083
11084/**
11085 * @opcode 0xf7
11086 */
11087FNIEMOP_DEF(iemOp_Grp3_Ev)
11088{
11089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11090 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11091 {
11092 case 0:
11093 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11094 case 1:
11095/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11096 return IEMOP_RAISE_INVALID_OPCODE();
11097 case 2:
11098 IEMOP_MNEMONIC(not_Ev, "not Ev");
11099 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11100 case 3:
11101 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11102 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11103 case 4:
11104 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11105 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11106 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11107 case 5:
11108 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11109 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11110 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11111 case 6:
11112 IEMOP_MNEMONIC(div_Ev, "div Ev");
11113 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11114 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11115 case 7:
11116 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11118 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11120 }
11121}
11122
11123
11124/**
11125 * @opcode 0xf8
11126 */
11127FNIEMOP_DEF(iemOp_clc)
11128{
11129 IEMOP_MNEMONIC(clc, "clc");
11130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11131 IEM_MC_BEGIN(0, 0);
11132 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11133 IEM_MC_ADVANCE_RIP();
11134 IEM_MC_END();
11135 return VINF_SUCCESS;
11136}
11137
11138
11139/**
11140 * @opcode 0xf9
11141 */
11142FNIEMOP_DEF(iemOp_stc)
11143{
11144 IEMOP_MNEMONIC(stc, "stc");
11145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11146 IEM_MC_BEGIN(0, 0);
11147 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11148 IEM_MC_ADVANCE_RIP();
11149 IEM_MC_END();
11150 return VINF_SUCCESS;
11151}
11152
11153
11154/**
11155 * @opcode 0xfa
11156 */
11157FNIEMOP_DEF(iemOp_cli)
11158{
11159 IEMOP_MNEMONIC(cli, "cli");
11160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11161 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11162}
11163
11164
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation (IOPL/VME and interrupt shadow
 * handling done there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11171
11172
11173/**
11174 * @opcode 0xfc
11175 */
11176FNIEMOP_DEF(iemOp_cld)
11177{
11178 IEMOP_MNEMONIC(cld, "cld");
11179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11180 IEM_MC_BEGIN(0, 0);
11181 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11182 IEM_MC_ADVANCE_RIP();
11183 IEM_MC_END();
11184 return VINF_SUCCESS;
11185}
11186
11187
11188/**
11189 * @opcode 0xfd
11190 */
11191FNIEMOP_DEF(iemOp_std)
11192{
11193 IEMOP_MNEMONIC(std, "std");
11194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11195 IEM_MC_BEGIN(0, 0);
11196 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11197 IEM_MC_ADVANCE_RIP();
11198 IEM_MC_END();
11199 return VINF_SUCCESS;
11200}
11201
11202
/**
 * @opcode 0xfe
 *
 * Group 4: INC/DEC Eb, dispatched on the ModR/M reg field.  Only /0 (inc)
 * and /1 (dec) are defined; /2 thru /7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            /* Reg fields 2..7 are undefined for 0xfe. */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11222
11223
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call.  The target RIP is read from a register or
 * memory operand; the push of the return address and the actual branching
 * is done by the iemCImpl_call_NN workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11308
11309typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11310
11311FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11312{
11313 /* Registers? How?? */
11314 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11315 { /* likely */ }
11316 else
11317 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11318
11319 /* Far pointer loaded from memory. */
11320 switch (pVCpu->iem.s.enmEffOpSize)
11321 {
11322 case IEMMODE_16BIT:
11323 IEM_MC_BEGIN(3, 1);
11324 IEM_MC_ARG(uint16_t, u16Sel, 0);
11325 IEM_MC_ARG(uint16_t, offSeg, 1);
11326 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11330 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11331 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11332 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11333 IEM_MC_END();
11334 return VINF_SUCCESS;
11335
11336 case IEMMODE_64BIT:
11337 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11338 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11339 * and call far qword [rsp] encodings. */
11340 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11341 {
11342 IEM_MC_BEGIN(3, 1);
11343 IEM_MC_ARG(uint16_t, u16Sel, 0);
11344 IEM_MC_ARG(uint64_t, offSeg, 1);
11345 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11349 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11350 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11351 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11352 IEM_MC_END();
11353 return VINF_SUCCESS;
11354 }
11355 /* AMD falls thru. */
11356 /* fall thru */
11357
11358 case IEMMODE_32BIT:
11359 IEM_MC_BEGIN(3, 1);
11360 IEM_MC_ARG(uint16_t, u16Sel, 0);
11361 IEM_MC_ARG(uint32_t, offSeg, 1);
11362 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11366 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11367 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11368 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11369 IEM_MC_END();
11370 return VINF_SUCCESS;
11371
11372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11373 }
11374}
11375
11376
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call through a far pointer in memory; shares the
 * operand decoding with jmpf via iemOpHlp_Grp5_far_Ep.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11386
11387
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump.  The target RIP is read from a register or
 * memory operand and set directly (no stack involvement).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11472
11473
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump through a far pointer in memory; shares the
 * operand decoding with callf via iemOpHlp_Grp5_far_Ep.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11483
11484
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - push a word/dword/qword register or memory operand.  Register
 * operands go through the common push worker; memory operands are fetched
 * and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11540
11541
/**
 * @opcode 0xff
 *
 * Group 5: dispatch on the ModR/M reg field - /0 inc, /1 dec, /2 near call,
 * /3 far call, /4 near jmp, /5 far jmp, /6 push; /7 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the reg field is a 3-bit value covered by the cases above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11572
11573
11574
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11642
11643
11644/** @} */
11645
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette