VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 15991

Last change on this file since 15991 was 15982, checked in by vboxsync, 16 years ago

Set the limit to 0xffff when loading a segment register in real mode.

  • Property svn:eol-style set to native
File size: 275.7 KB
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
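/* For illustration, a 64-bit reader composed the same little-endian way
   from the ldl_code override above might look like this (ldq_code_raw is
   a hypothetical name, not defined elsewhere in this file): */
#if 0
uint64_t ldq_code_raw(target_ulong pc)
{
    return ((uint64_t)ldl_code(pc + 4) << 32) | ldl_code(pc);
}
#endif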
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int pvi; /* CR4.PVI */
148 int record_call; /* record calls for CSAM or not? */
149#endif
150 int cpl;
151 int iopl;
152 int tf; /* TF cpu flag */
153 int singlestep_enabled; /* "hardware" single step enabled */
154 int jmp_opt; /* use direct block chaining for direct jumps */
155 int mem_index; /* select memory access functions */
156 uint64_t flags; /* all execution flags */
157 struct TranslationBlock *tb;
158 int popl_esp_hack; /* for correct popl with esp base handling */
159 int rip_offset; /* only used in x86_64, but left for simplicity */
160 int cpuid_features;
161 int cpuid_ext_features;
162 int cpuid_ext2_features;
163 int cpuid_ext3_features;
164} DisasContext;
165
166static void gen_eob(DisasContext *s);
167static void gen_jmp(DisasContext *s, target_ulong eip);
168static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
169
170#ifdef VBOX
171static void gen_check_external_event();
172#endif
173
174/* i386 arith/logic operations */
175enum {
176 OP_ADDL,
177 OP_ORL,
178 OP_ADCL,
179 OP_SBBL,
180 OP_ANDL,
181 OP_SUBL,
182 OP_XORL,
183 OP_CMPL,
184};
185
186/* i386 shift ops */
187enum {
188 OP_ROL,
189 OP_ROR,
190 OP_RCL,
191 OP_RCR,
192 OP_SHL,
193 OP_SHR,
194 OP_SHL1, /* undocumented */
195 OP_SAR = 7,
196};
197
198enum {
199 JCC_O,
200 JCC_B,
201 JCC_Z,
202 JCC_BE,
203 JCC_S,
204 JCC_P,
205 JCC_L,
206 JCC_LE,
207};
208
209/* operand size */
210enum {
211 OT_BYTE = 0,
212 OT_WORD,
213 OT_LONG,
214 OT_QUAD,
215};
216
217enum {
218 /* I386 int registers */
219 OR_EAX, /* MUST be even numbered */
220 OR_ECX,
221 OR_EDX,
222 OR_EBX,
223 OR_ESP,
224 OR_EBP,
225 OR_ESI,
226 OR_EDI,
227
228 OR_TMP0 = 16, /* temporary operand register */
229 OR_TMP1,
230 OR_A0, /* temporary register used when doing address evaluation */
231};
232
233#ifndef VBOX
234static inline void gen_op_movl_T0_0(void)
235#else /* VBOX */
236DECLINLINE(void) gen_op_movl_T0_0(void)
237#endif /* VBOX */
238{
239 tcg_gen_movi_tl(cpu_T[0], 0);
240}
241
242#ifndef VBOX
243static inline void gen_op_movl_T0_im(int32_t val)
244#else /* VBOX */
245DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
246#endif /* VBOX */
247{
248 tcg_gen_movi_tl(cpu_T[0], val);
249}
250
251#ifndef VBOX
252static inline void gen_op_movl_T0_imu(uint32_t val)
253#else /* VBOX */
254DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
255#endif /* VBOX */
256{
257 tcg_gen_movi_tl(cpu_T[0], val);
258}
259
260#ifndef VBOX
261static inline void gen_op_movl_T1_im(int32_t val)
262#else /* VBOX */
263DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
264#endif /* VBOX */
265{
266 tcg_gen_movi_tl(cpu_T[1], val);
267}
268
269#ifndef VBOX
270static inline void gen_op_movl_T1_imu(uint32_t val)
271#else /* VBOX */
272DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
273#endif /* VBOX */
274{
275 tcg_gen_movi_tl(cpu_T[1], val);
276}
277
278#ifndef VBOX
279static inline void gen_op_movl_A0_im(uint32_t val)
280#else /* VBOX */
281DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
282#endif /* VBOX */
283{
284 tcg_gen_movi_tl(cpu_A0, val);
285}
286
287#ifdef TARGET_X86_64
288#ifndef VBOX
289static inline void gen_op_movq_A0_im(int64_t val)
290#else /* VBOX */
291DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
292#endif /* VBOX */
293{
294 tcg_gen_movi_tl(cpu_A0, val);
295}
296#endif
297
298#ifndef VBOX
299static inline void gen_movtl_T0_im(target_ulong val)
300#else /* VBOX */
301DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
302#endif /* VBOX */
303{
304 tcg_gen_movi_tl(cpu_T[0], val);
305}
306
307#ifndef VBOX
308static inline void gen_movtl_T1_im(target_ulong val)
309#else /* VBOX */
310DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
311#endif /* VBOX */
312{
313 tcg_gen_movi_tl(cpu_T[1], val);
314}
315
316#ifndef VBOX
317static inline void gen_op_andl_T0_ffff(void)
318#else /* VBOX */
319DECLINLINE(void) gen_op_andl_T0_ffff(void)
320#endif /* VBOX */
321{
322 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
323}
324
325#ifndef VBOX
326static inline void gen_op_andl_T0_im(uint32_t val)
327#else /* VBOX */
328DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
329#endif /* VBOX */
330{
331 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
332}
333
334#ifndef VBOX
335static inline void gen_op_movl_T0_T1(void)
336#else /* VBOX */
337DECLINLINE(void) gen_op_movl_T0_T1(void)
338#endif /* VBOX */
339{
340 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
341}
342
343#ifndef VBOX
344static inline void gen_op_andl_A0_ffff(void)
345#else /* VBOX */
346DECLINLINE(void) gen_op_andl_A0_ffff(void)
347#endif /* VBOX */
348{
349 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
350}
351
352#ifdef TARGET_X86_64
353
354#define NB_OP_SIZES 4
355
356#else /* !TARGET_X86_64 */
357
358#define NB_OP_SIZES 3
359
360#endif /* !TARGET_X86_64 */
361
362#if defined(WORDS_BIGENDIAN)
363#define REG_B_OFFSET (sizeof(target_ulong) - 1)
364#define REG_H_OFFSET (sizeof(target_ulong) - 2)
365#define REG_W_OFFSET (sizeof(target_ulong) - 2)
366#define REG_L_OFFSET (sizeof(target_ulong) - 4)
367#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
368#else
369#define REG_B_OFFSET 0
370#define REG_H_OFFSET 1
371#define REG_W_OFFSET 0
372#define REG_L_OFFSET 0
373#define REG_LH_OFFSET 4
374#endif
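/* A standalone sketch of the aliasing these offsets implement on a
   little-endian host with an 8-byte target_ulong (AL is byte 0 of the
   full register, AH is byte 1): */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>
int main(void)
{
    uint64_t rax = 0x1122334455667788ULL;
    uint8_t al, ah;
    memcpy(&al, (uint8_t *)&rax + 0, 1);    /* REG_B_OFFSET */
    memcpy(&ah, (uint8_t *)&rax + 1, 1);    /* REG_H_OFFSET */
    assert(al == 0x88 && ah == 0x77);
    return 0;
}
#endif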
375
376#ifndef VBOX
377static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
378#else /* VBOX */
379DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
380#endif /* VBOX */
381{
382 switch(ot) {
383 case OT_BYTE:
384 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
385 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
386 } else {
387 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
388 }
389 break;
390 case OT_WORD:
391 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
392 break;
393#ifdef TARGET_X86_64
394 case OT_LONG:
395 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
396 /* high part of register set to zero */
397 tcg_gen_movi_tl(cpu_tmp0, 0);
398 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
399 break;
400 default:
401 case OT_QUAD:
402 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
403 break;
404#else
405 default:
406 case OT_LONG:
407 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
408 break;
409#endif
410 }
411}
412
413#ifndef VBOX
414static inline void gen_op_mov_reg_T0(int ot, int reg)
415#else /* VBOX */
416DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
417#endif /* VBOX */
418{
419 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
420}
421
422#ifndef VBOX
423static inline void gen_op_mov_reg_T1(int ot, int reg)
424#else /* VBOX */
425DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
426#endif /* VBOX */
427{
428 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
429}
430
431#ifndef VBOX
432static inline void gen_op_mov_reg_A0(int size, int reg)
433#else /* VBOX */
434DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
435#endif /* VBOX */
436{
437 switch(size) {
438 case 0:
439 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
440 break;
441#ifdef TARGET_X86_64
442 case 1:
443 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
444 /* high part of register set to zero */
445 tcg_gen_movi_tl(cpu_tmp0, 0);
446 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
447 break;
448 default:
449 case 2:
450 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
451 break;
452#else
453 default:
454 case 1:
455 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
456 break;
457#endif
458 }
459}
460
461#ifndef VBOX
462static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
463#else /* VBOX */
464DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
465#endif /* VBOX */
466{
467 switch(ot) {
468 case OT_BYTE:
469 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
470#ifndef VBOX
471 goto std_case;
472#else
473 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
474#endif
475 } else {
476 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
477 }
478 break;
479 default:
480 std_case:
481 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
482 break;
483 }
484}
485
486#ifndef VBOX
487static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
488#else /* VBOX */
489DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
490#endif /* VBOX */
491{
492 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
493}
494
495#ifndef VBOX
496static inline void gen_op_movl_A0_reg(int reg)
497#else /* VBOX */
498DECLINLINE(void) gen_op_movl_A0_reg(int reg)
499#endif /* VBOX */
500{
501 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
502}
503
504#ifndef VBOX
505static inline void gen_op_addl_A0_im(int32_t val)
506#else /* VBOX */
507DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
508#endif /* VBOX */
509{
510 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
511#ifdef TARGET_X86_64
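    /* emulate 32-bit address-size wrap-around on a 64-bit target:
       e.g. 0xffffffff + 1 must yield 0, so mask back to 32 bits */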
512 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
513#endif
514}
515
516#ifdef TARGET_X86_64
517#ifndef VBOX
518static inline void gen_op_addq_A0_im(int64_t val)
519#else /* VBOX */
520DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
521#endif /* VBOX */
522{
523 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
524}
525#endif
526
527static void gen_add_A0_im(DisasContext *s, int val)
528{
529#ifdef TARGET_X86_64
530 if (CODE64(s))
531 gen_op_addq_A0_im(val);
532 else
533#endif
534 gen_op_addl_A0_im(val);
535}
536
537#ifndef VBOX
538static inline void gen_op_addl_T0_T1(void)
539#else /* VBOX */
540DECLINLINE(void) gen_op_addl_T0_T1(void)
541#endif /* VBOX */
542{
543 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
544}
545
546#ifndef VBOX
547static inline void gen_op_jmp_T0(void)
548#else /* VBOX */
549DECLINLINE(void) gen_op_jmp_T0(void)
550#endif /* VBOX */
551{
552 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
553}
554
555#ifndef VBOX
556static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
557#else /* VBOX */
558DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
559#endif /* VBOX */
560{
561 switch(size) {
562 case 0:
563 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
564 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
565 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
566 break;
567 case 1:
568 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
569 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
570#ifdef TARGET_X86_64
571 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
572#endif
573 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
574 break;
575#ifdef TARGET_X86_64
576 case 2:
577 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
578 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
579 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
580 break;
581#endif
582 }
583}
584
585#ifndef VBOX
586static inline void gen_op_add_reg_T0(int size, int reg)
587#else /* VBOX */
588DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
589#endif /* VBOX */
590{
591 switch(size) {
592 case 0:
593 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
594 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
595 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
596 break;
597 case 1:
598 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
599 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
600#ifdef TARGET_X86_64
601 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
602#endif
603 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
604 break;
605#ifdef TARGET_X86_64
606 case 2:
607 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
608 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
609 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
610 break;
611#endif
612 }
613}
614
615#ifndef VBOX
616static inline void gen_op_set_cc_op(int32_t val)
617#else /* VBOX */
618DECLINLINE(void) gen_op_set_cc_op(int32_t val)
619#endif /* VBOX */
620{
621 tcg_gen_movi_i32(cpu_cc_op, val);
622}
623
624#ifndef VBOX
625static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
626#else /* VBOX */
627DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
628#endif /* VBOX */
629{
630 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
631 if (shift != 0)
632 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
633 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
634#ifdef TARGET_X86_64
635 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
636#endif
637}
638#ifdef VBOX
639DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
640{
641 /* It seems segments don't get out of sync; if they in fact do, enable the code below. */
642#if 0
643 /* Our segments could be outdated; check the newselector field to see if an update is really needed */
644 int skip_label;
645 TCGv t0, a0;
646
647 /* For segments other than GS this check is a waste of time; TCG is also unable to cope with this code
648 for data/stack segments, as it expects cpu_T[0] to be live */
649 if (reg != R_GS)
650 return;
651
652 if (keepA0)
653 {
654 /* we need to store old cpu_A0 */
655 a0 = tcg_temp_local_new(TCG_TYPE_TL);
656 tcg_gen_mov_tl(a0, cpu_A0);
657 }
658
659 skip_label = gen_new_label();
660 t0 = tcg_temp_local_new(TCG_TYPE_TL);
661
662 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
663 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
664 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
665 tcg_gen_andi_tl(t0, t0, VM_MASK);
666 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
667 tcg_gen_movi_tl(t0, reg);
668
669 tcg_gen_helper_0_1(helper_sync_seg, t0);
670
671 tcg_temp_free(t0);
672
673 gen_set_label(skip_label);
674 if (keepA0)
675 {
676 tcg_gen_mov_tl(cpu_A0, a0);
677 tcg_temp_free(a0);
678 }
679#endif /* 0 */
680}
681#endif
682
683#ifndef VBOX
684static inline void gen_op_movl_A0_seg(int reg)
685#else /* VBOX */
686DECLINLINE(void) gen_op_movl_A0_seg(int reg)
687#endif /* VBOX */
688{
689#ifdef VBOX
690 gen_op_seg_check(reg, false);
691#endif
692 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
693}
694
695#ifndef VBOX
696static inline void gen_op_addl_A0_seg(int reg)
697#else /* VBOX */
698DECLINLINE(void) gen_op_addl_A0_seg(int reg)
699#endif /* VBOX */
700{
701#ifdef VBOX
702 gen_op_seg_check(reg, true);
703#endif
704 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
705 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
706#ifdef TARGET_X86_64
707 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
708#endif
709}
710
711#ifdef TARGET_X86_64
712#ifndef VBOX
713static inline void gen_op_movq_A0_seg(int reg)
714#else /* VBOX */
715DECLINLINE(void) gen_op_movq_A0_seg(int reg)
716#endif /* VBOX */
717{
718#ifdef VBOX
719 gen_op_seg_check(reg, false);
720#endif
721 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
722}
723
724#ifndef VBOX
725static inline void gen_op_addq_A0_seg(int reg)
726#else /* VBOX */
727DECLINLINE(void) gen_op_addq_A0_seg(int reg)
728#endif /* VBOX */
729{
730#ifdef VBOX
731 gen_op_seg_check(reg, true);
732#endif
733 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
734 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
735}
736
737#ifndef VBOX
738static inline void gen_op_movq_A0_reg(int reg)
739#else /* VBOX */
740DECLINLINE(void) gen_op_movq_A0_reg(int reg)
741#endif /* VBOX */
742{
743 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
744}
745
746#ifndef VBOX
747static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
748#else /* VBOX */
749DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
750#endif /* VBOX */
751{
752 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
753 if (shift != 0)
754 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
755 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
756}
757#endif
758
759#ifndef VBOX
760static inline void gen_op_lds_T0_A0(int idx)
761#else /* VBOX */
762DECLINLINE(void) gen_op_lds_T0_A0(int idx)
763#endif /* VBOX */
764{
765 int mem_index = (idx >> 2) - 1;
766 switch(idx & 3) {
767 case 0:
768 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
769 break;
770 case 1:
771 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
772 break;
773 default:
774 case 2:
775 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
776 break;
777 }
778}
779
780#ifndef VBOX
781static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
782#else /* VBOX */
783DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
784#endif /* VBOX */
785{
786 int mem_index = (idx >> 2) - 1;
787 switch(idx & 3) {
788 case 0:
789 tcg_gen_qemu_ld8u(t0, a0, mem_index);
790 break;
791 case 1:
792 tcg_gen_qemu_ld16u(t0, a0, mem_index);
793 break;
794 case 2:
795 tcg_gen_qemu_ld32u(t0, a0, mem_index);
796 break;
797 default:
798 case 3:
799 tcg_gen_qemu_ld64(t0, a0, mem_index);
800 break;
801 }
802}
803
804/* XXX: always use ldu or lds */
805#ifndef VBOX
806static inline void gen_op_ld_T0_A0(int idx)
807#else /* VBOX */
808DECLINLINE(void) gen_op_ld_T0_A0(int idx)
809#endif /* VBOX */
810{
811 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
812}
813
814#ifndef VBOX
815static inline void gen_op_ldu_T0_A0(int idx)
816#else /* VBOX */
817DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
818#endif /* VBOX */
819{
820 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
821}
822
823#ifndef VBOX
824static inline void gen_op_ld_T1_A0(int idx)
825#else /* VBOX */
826DECLINLINE(void) gen_op_ld_T1_A0(int idx)
827#endif /* VBOX */
828{
829 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
830}
831
832#ifndef VBOX
833static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
834#else /* VBOX */
835DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
836#endif /* VBOX */
837{
838 int mem_index = (idx >> 2) - 1;
839 switch(idx & 3) {
840 case 0:
841 tcg_gen_qemu_st8(t0, a0, mem_index);
842 break;
843 case 1:
844 tcg_gen_qemu_st16(t0, a0, mem_index);
845 break;
846 case 2:
847 tcg_gen_qemu_st32(t0, a0, mem_index);
848 break;
849 default:
850 case 3:
851 tcg_gen_qemu_st64(t0, a0, mem_index);
852 break;
853 }
854}
855
856#ifndef VBOX
857static inline void gen_op_st_T0_A0(int idx)
858#else /* VBOX */
859DECLINLINE(void) gen_op_st_T0_A0(int idx)
860#endif /* VBOX */
861{
862 gen_op_st_v(idx, cpu_T[0], cpu_A0);
863}
864
865#ifndef VBOX
866static inline void gen_op_st_T1_A0(int idx)
867#else /* VBOX */
868DECLINLINE(void) gen_op_st_T1_A0(int idx)
869#endif /* VBOX */
870{
871 gen_op_st_v(idx, cpu_T[1], cpu_A0);
872}
873
874#ifdef VBOX
875static void gen_check_external_event()
876{
877 int skip_label;
878 TCGv t0;
879
880 skip_label = gen_new_label();
881 t0 = tcg_temp_local_new(TCG_TYPE_TL);
882 /* t0 = cpu_tmp0; */
883
884 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
885 /* Keep in sync with helper_check_external_event() */
886 tcg_gen_andi_tl(t0, t0,
887 CPU_INTERRUPT_EXTERNAL_EXIT
888 | CPU_INTERRUPT_EXTERNAL_TIMER
889 | CPU_INTERRUPT_EXTERNAL_DMA
890 | CPU_INTERRUPT_EXTERNAL_HARD);
891 /** @todo: predict branch as taken */
892 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
893 tcg_temp_free(t0);
894
895 tcg_gen_helper_0_0(helper_check_external_event);
896
897 gen_set_label(skip_label);
898}
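/* Plain-C equivalent of the inline test emitted above (a sketch, using the
   same interrupt_request bits as the generated code): */
#if 0
    if (env->interrupt_request & (CPU_INTERRUPT_EXTERNAL_EXIT
                                  | CPU_INTERRUPT_EXTERNAL_TIMER
                                  | CPU_INTERRUPT_EXTERNAL_DMA
                                  | CPU_INTERRUPT_EXTERNAL_HARD))
        helper_check_external_event();
#endif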
899
900static void gen_check_external_event2()
901{
902 tcg_gen_helper_0_0(helper_check_external_event);
903}
904
905#endif
906
907#ifndef VBOX
908static inline void gen_jmp_im(target_ulong pc)
909#else /* VBOX */
910DECLINLINE(void) gen_jmp_im(target_ulong pc)
911#endif /* VBOX */
912{
913 tcg_gen_movi_tl(cpu_tmp0, pc);
914 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
915}
916
917#ifdef VBOX
918DECLINLINE(void) gen_update_eip(target_ulong pc)
919{
920 gen_jmp_im(pc);
921#ifdef VBOX_DUMP_STATE
922 tcg_gen_helper_0_0(helper_dump_state);
923#endif
924}
925
926#endif
927
928#ifndef VBOX
929static inline void gen_string_movl_A0_ESI(DisasContext *s)
930#else /* VBOX */
931DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
932#endif /* VBOX */
933{
934 int override;
935
936 override = s->override;
937#ifdef TARGET_X86_64
938 if (s->aflag == 2) {
939 if (override >= 0) {
940 gen_op_movq_A0_seg(override);
941 gen_op_addq_A0_reg_sN(0, R_ESI);
942 } else {
943 gen_op_movq_A0_reg(R_ESI);
944 }
945 } else
946#endif
947 if (s->aflag) {
948 /* 32 bit address */
949 if (s->addseg && override < 0)
950 override = R_DS;
951 if (override >= 0) {
952 gen_op_movl_A0_seg(override);
953 gen_op_addl_A0_reg_sN(0, R_ESI);
954 } else {
955 gen_op_movl_A0_reg(R_ESI);
956 }
957 } else {
958 /* 16 bit address, always override */
959 if (override < 0)
960 override = R_DS;
961 gen_op_movl_A0_reg(R_ESI);
962 gen_op_andl_A0_ffff();
963 gen_op_addl_A0_seg(override);
964 }
965}
966
967#ifndef VBOX
968static inline void gen_string_movl_A0_EDI(DisasContext *s)
969#else /* VBOX */
970DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
971#endif /* VBOX */
972{
973#ifdef TARGET_X86_64
974 if (s->aflag == 2) {
975 gen_op_movq_A0_reg(R_EDI);
976 } else
977#endif
978 if (s->aflag) {
979 if (s->addseg) {
980 gen_op_movl_A0_seg(R_ES);
981 gen_op_addl_A0_reg_sN(0, R_EDI);
982 } else {
983 gen_op_movl_A0_reg(R_EDI);
984 }
985 } else {
986 gen_op_movl_A0_reg(R_EDI);
987 gen_op_andl_A0_ffff();
988 gen_op_addl_A0_seg(R_ES);
989 }
990}
991
992#ifndef VBOX
993static inline void gen_op_movl_T0_Dshift(int ot)
994#else /* VBOX */
995DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
996#endif /* VBOX */
997{
998 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
999 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1000}
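/* e.g. with OT_LONG (ot == 2): env->df is +1 or -1, so the shift yields
   +4 or -4, exactly the per-element step the string ops add to ESI/EDI. */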
1001
1002static void gen_extu(int ot, TCGv reg)
1003{
1004 switch(ot) {
1005 case OT_BYTE:
1006 tcg_gen_ext8u_tl(reg, reg);
1007 break;
1008 case OT_WORD:
1009 tcg_gen_ext16u_tl(reg, reg);
1010 break;
1011 case OT_LONG:
1012 tcg_gen_ext32u_tl(reg, reg);
1013 break;
1014 default:
1015 break;
1016 }
1017}
1018
1019static void gen_exts(int ot, TCGv reg)
1020{
1021 switch(ot) {
1022 case OT_BYTE:
1023 tcg_gen_ext8s_tl(reg, reg);
1024 break;
1025 case OT_WORD:
1026 tcg_gen_ext16s_tl(reg, reg);
1027 break;
1028 case OT_LONG:
1029 tcg_gen_ext32s_tl(reg, reg);
1030 break;
1031 default:
1032 break;
1033 }
1034}
1035
1036#ifndef VBOX
1037static inline void gen_op_jnz_ecx(int size, int label1)
1038#else /* VBOX */
1039DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1040#endif /* VBOX */
1041{
1042 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1043 gen_extu(size + 1, cpu_tmp0);
1044 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1045}
1046
1047#ifndef VBOX
1048static inline void gen_op_jz_ecx(int size, int label1)
1049#else /* VBOX */
1050DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1051#endif /* VBOX */
1052{
1053 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1054 gen_extu(size + 1, cpu_tmp0);
1055 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1056}
1057
1058static void *helper_in_func[3] = {
1059 helper_inb,
1060 helper_inw,
1061 helper_inl,
1062};
1063
1064static void *helper_out_func[3] = {
1065 helper_outb,
1066 helper_outw,
1067 helper_outl,
1068};
1069
1070static void *gen_check_io_func[3] = {
1071 helper_check_iob,
1072 helper_check_iow,
1073 helper_check_iol,
1074};
1075
1076static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1077 uint32_t svm_flags)
1078{
1079 int state_saved;
1080 target_ulong next_eip;
1081
1082 state_saved = 0;
1083 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1084 if (s->cc_op != CC_OP_DYNAMIC)
1085 gen_op_set_cc_op(s->cc_op);
1086 gen_jmp_im(cur_eip);
1087 state_saved = 1;
1088 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1089 tcg_gen_helper_0_1(gen_check_io_func[ot],
1090 cpu_tmp2_i32);
1091 }
1092 if(s->flags & HF_SVMI_MASK) {
1093 if (!state_saved) {
1094 if (s->cc_op != CC_OP_DYNAMIC)
1095 gen_op_set_cc_op(s->cc_op);
1096 gen_jmp_im(cur_eip);
1097 state_saved = 1;
1098 }
1099 svm_flags |= (1 << (4 + ot));
1100 next_eip = s->pc - s->cs_base;
1101 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1102 tcg_gen_helper_0_3(helper_svm_check_io,
1103 cpu_tmp2_i32,
1104 tcg_const_i32(svm_flags),
1105 tcg_const_i32(next_eip - cur_eip));
1106 }
1107}
1108
1109#ifndef VBOX
1110static inline void gen_movs(DisasContext *s, int ot)
1111#else /* VBOX */
1112DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1113#endif /* VBOX */
1114{
1115 gen_string_movl_A0_ESI(s);
1116 gen_op_ld_T0_A0(ot + s->mem_index);
1117 gen_string_movl_A0_EDI(s);
1118 gen_op_st_T0_A0(ot + s->mem_index);
1119 gen_op_movl_T0_Dshift(ot);
1120 gen_op_add_reg_T0(s->aflag, R_ESI);
1121 gen_op_add_reg_T0(s->aflag, R_EDI);
1122}
1123
1124#ifndef VBOX
1125static inline void gen_update_cc_op(DisasContext *s)
1126#else /* VBOX */
1127DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1128#endif /* VBOX */
1129{
1130 if (s->cc_op != CC_OP_DYNAMIC) {
1131 gen_op_set_cc_op(s->cc_op);
1132 s->cc_op = CC_OP_DYNAMIC;
1133 }
1134}
1135
1136static void gen_op_update1_cc(void)
1137{
1138 tcg_gen_discard_tl(cpu_cc_src);
1139 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1140}
1141
1142static void gen_op_update2_cc(void)
1143{
1144 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1145 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1146}
1147
1148#ifndef VBOX
1149static inline void gen_op_cmpl_T0_T1_cc(void)
1150#else /* VBOX */
1151DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1152#endif /* VBOX */
1153{
1154 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1155 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1156}
1157
1158#ifndef VBOX
1159static inline void gen_op_testl_T0_T1_cc(void)
1160#else /* VBOX */
1161DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1162#endif /* VBOX */
1163{
1164 tcg_gen_discard_tl(cpu_cc_src);
1165 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1166}
1167
1168static void gen_op_update_neg_cc(void)
1169{
1170 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1171 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1172}
1173
1174/* compute eflags.C to reg */
1175static void gen_compute_eflags_c(TCGv reg)
1176{
1177#if TCG_TARGET_REG_BITS == 32
1178 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1179 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1180 (long)cc_table + offsetof(CCTable, compute_c));
1181 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1182 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1183 1, &cpu_tmp2_i32, 0, NULL);
1184#else
1185 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1186 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1187 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1188 (long)cc_table + offsetof(CCTable, compute_c));
1189 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1190 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1191 1, &cpu_tmp2_i32, 0, NULL);
1192#endif
1193 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1194}
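/* The shift counts above encode sizeof(CCTable): assuming the struct holds
   just the two helper pointers (compute_all, compute_c), it is 8 bytes on
   a 32-bit host (shli by 3) and 16 bytes on a 64-bit host (shli by 4), so
   cc_table + (cc_op << shift) is the address of the current entry. */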
1195
1196/* compute all eflags to cc_src */
1197static void gen_compute_eflags(TCGv reg)
1198{
1199#if TCG_TARGET_REG_BITS == 32
1200 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1201 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1202 (long)cc_table + offsetof(CCTable, compute_all));
1203 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1204 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1205 1, &cpu_tmp2_i32, 0, NULL);
1206#else
1207 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1208 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1209 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1210 (long)cc_table + offsetof(CCTable, compute_all));
1211 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1212 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1213 1, &cpu_tmp2_i32, 0, NULL);
1214#endif
1215 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1216}
1217
1218#ifndef VBOX
1219static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1220#else /* VBOX */
1221DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1222#endif /* VBOX */
1223{
1224 if (s->cc_op != CC_OP_DYNAMIC)
1225 gen_op_set_cc_op(s->cc_op);
1226 switch(jcc_op) {
1227 case JCC_O:
1228 gen_compute_eflags(cpu_T[0]);
1229 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1230 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1231 break;
1232 case JCC_B:
1233 gen_compute_eflags_c(cpu_T[0]);
1234 break;
1235 case JCC_Z:
1236 gen_compute_eflags(cpu_T[0]);
1237 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1238 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1239 break;
1240 case JCC_BE:
1241 gen_compute_eflags(cpu_tmp0);
1242 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1243 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1244 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1245 break;
1246 case JCC_S:
1247 gen_compute_eflags(cpu_T[0]);
1248 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1249 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1250 break;
1251 case JCC_P:
1252 gen_compute_eflags(cpu_T[0]);
1253 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1254 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1255 break;
1256 case JCC_L:
1257 gen_compute_eflags(cpu_tmp0);
1258 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1259 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1260 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1261 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1262 break;
1263 default:
1264 case JCC_LE:
1265 gen_compute_eflags(cpu_tmp0);
1266 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1267 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1268 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1269 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1270 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1271 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1272 break;
1273 }
1274}
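/* EFLAGS bit positions the shifts above extract: OF is bit 11 (JCC_O),
   SF bit 7 (JCC_S), ZF bit 6 (JCC_Z), PF bit 2 (JCC_P); JCC_L computes
   SF ^ OF and JCC_LE computes (SF ^ OF) | ZF, matching the xor/or chains. */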
1275
1276/* return true if setcc_slow is not needed (WARNING: must be kept in
1277 sync with gen_jcc1) */
1278static int is_fast_jcc_case(DisasContext *s, int b)
1279{
1280 int jcc_op;
1281 jcc_op = (b >> 1) & 7;
1282 switch(s->cc_op) {
1283 /* we optimize the cmp/jcc case */
1284 case CC_OP_SUBB:
1285 case CC_OP_SUBW:
1286 case CC_OP_SUBL:
1287 case CC_OP_SUBQ:
1288 if (jcc_op == JCC_O || jcc_op == JCC_P)
1289 goto slow_jcc;
1290 break;
1291
1292 /* some jumps are easy to compute */
1293 case CC_OP_ADDB:
1294 case CC_OP_ADDW:
1295 case CC_OP_ADDL:
1296 case CC_OP_ADDQ:
1297
1298 case CC_OP_LOGICB:
1299 case CC_OP_LOGICW:
1300 case CC_OP_LOGICL:
1301 case CC_OP_LOGICQ:
1302
1303 case CC_OP_INCB:
1304 case CC_OP_INCW:
1305 case CC_OP_INCL:
1306 case CC_OP_INCQ:
1307
1308 case CC_OP_DECB:
1309 case CC_OP_DECW:
1310 case CC_OP_DECL:
1311 case CC_OP_DECQ:
1312
1313 case CC_OP_SHLB:
1314 case CC_OP_SHLW:
1315 case CC_OP_SHLL:
1316 case CC_OP_SHLQ:
1317 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1318 goto slow_jcc;
1319 break;
1320 default:
1321 slow_jcc:
1322 return 0;
1323 }
1324 return 1;
1325}
1326
1327/* generate a conditional jump to label 'l1' according to jump opcode
1328 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1329#ifndef VBOX
1330static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1331#else /* VBOX */
1332DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1333#endif /* VBOX */
1334{
1335 int inv, jcc_op, size, cond;
1336 TCGv t0;
1337
1338 inv = b & 1;
1339 jcc_op = (b >> 1) & 7;
1340
1341 switch(cc_op) {
1342 /* we optimize the cmp/jcc case */
1343 case CC_OP_SUBB:
1344 case CC_OP_SUBW:
1345 case CC_OP_SUBL:
1346 case CC_OP_SUBQ:
1347
1348 size = cc_op - CC_OP_SUBB;
1349 switch(jcc_op) {
1350 case JCC_Z:
1351 fast_jcc_z:
1352 switch(size) {
1353 case 0:
1354 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1355 t0 = cpu_tmp0;
1356 break;
1357 case 1:
1358 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1359 t0 = cpu_tmp0;
1360 break;
1361#ifdef TARGET_X86_64
1362 case 2:
1363 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1364 t0 = cpu_tmp0;
1365 break;
1366#endif
1367 default:
1368 t0 = cpu_cc_dst;
1369 break;
1370 }
1371 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1372 break;
1373 case JCC_S:
1374 fast_jcc_s:
1375 switch(size) {
1376 case 0:
1377 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1378 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1379 0, l1);
1380 break;
1381 case 1:
1382 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1383 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1384 0, l1);
1385 break;
1386#ifdef TARGET_X86_64
1387 case 2:
1388 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1389 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1390 0, l1);
1391 break;
1392#endif
1393 default:
1394 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1395 0, l1);
1396 break;
1397 }
1398 break;
1399
1400 case JCC_B:
1401 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1402 goto fast_jcc_b;
1403 case JCC_BE:
1404 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1405 fast_jcc_b:
1406 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1407 switch(size) {
1408 case 0:
1409 t0 = cpu_tmp0;
1410 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1411 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1412 break;
1413 case 1:
1414 t0 = cpu_tmp0;
1415 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1416 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1417 break;
1418#ifdef TARGET_X86_64
1419 case 2:
1420 t0 = cpu_tmp0;
1421 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1422 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1423 break;
1424#endif
1425 default:
1426 t0 = cpu_cc_src;
1427 break;
1428 }
1429 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1430 break;
1431
1432 case JCC_L:
1433 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1434 goto fast_jcc_l;
1435 case JCC_LE:
1436 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1437 fast_jcc_l:
1438 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1439 switch(size) {
1440 case 0:
1441 t0 = cpu_tmp0;
1442 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1443 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1444 break;
1445 case 1:
1446 t0 = cpu_tmp0;
1447 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1448 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1449 break;
1450#ifdef TARGET_X86_64
1451 case 2:
1452 t0 = cpu_tmp0;
1453 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1454 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1455 break;
1456#endif
1457 default:
1458 t0 = cpu_cc_src;
1459 break;
1460 }
1461 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1462 break;
1463
1464 default:
1465 goto slow_jcc;
1466 }
1467 break;
1468
1469 /* some jumps are easy to compute */
1470 case CC_OP_ADDB:
1471 case CC_OP_ADDW:
1472 case CC_OP_ADDL:
1473 case CC_OP_ADDQ:
1474
1475 case CC_OP_ADCB:
1476 case CC_OP_ADCW:
1477 case CC_OP_ADCL:
1478 case CC_OP_ADCQ:
1479
1480 case CC_OP_SBBB:
1481 case CC_OP_SBBW:
1482 case CC_OP_SBBL:
1483 case CC_OP_SBBQ:
1484
1485 case CC_OP_LOGICB:
1486 case CC_OP_LOGICW:
1487 case CC_OP_LOGICL:
1488 case CC_OP_LOGICQ:
1489
1490 case CC_OP_INCB:
1491 case CC_OP_INCW:
1492 case CC_OP_INCL:
1493 case CC_OP_INCQ:
1494
1495 case CC_OP_DECB:
1496 case CC_OP_DECW:
1497 case CC_OP_DECL:
1498 case CC_OP_DECQ:
1499
1500 case CC_OP_SHLB:
1501 case CC_OP_SHLW:
1502 case CC_OP_SHLL:
1503 case CC_OP_SHLQ:
1504
1505 case CC_OP_SARB:
1506 case CC_OP_SARW:
1507 case CC_OP_SARL:
1508 case CC_OP_SARQ:
1509 switch(jcc_op) {
1510 case JCC_Z:
1511 size = (cc_op - CC_OP_ADDB) & 3;
1512 goto fast_jcc_z;
1513 case JCC_S:
1514 size = (cc_op - CC_OP_ADDB) & 3;
1515 goto fast_jcc_s;
1516 default:
1517 goto slow_jcc;
1518 }
1519 break;
1520 default:
1521 slow_jcc:
1522 gen_setcc_slow_T0(s, jcc_op);
1523 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1524 cpu_T[0], 0, l1);
1525 break;
1526 }
1527}
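/* A self-contained check of the fast_jcc_b trick above: after a subtract,
   cc_dst holds a - b and cc_src holds b, so a is recovered as
   cc_dst + cc_src and JB reduces to an unsigned compare: */
#if 0
#include <assert.h>
#include <stdint.h>
int main(void)
{
    uint32_t a = 3, b = 5;
    uint32_t cc_dst = a - b, cc_src = b;
    assert(((cc_dst + cc_src) < cc_src) == (a < b));    /* JB <=> CF set */
    return 0;
}
#endif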
1528
1529/* XXX: does not work with gdbstub "ice" single step - not a
1530 serious problem */
1531static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1532{
1533 int l1, l2;
1534
1535 l1 = gen_new_label();
1536 l2 = gen_new_label();
1537 gen_op_jnz_ecx(s->aflag, l1);
1538 gen_set_label(l2);
1539 gen_jmp_tb(s, next_eip, 1);
1540 gen_set_label(l1);
1541 return l2;
1542}
1543
1544#ifndef VBOX
1545static inline void gen_stos(DisasContext *s, int ot)
1546#else /* VBOX */
1547DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1548#endif /* VBOX */
1549{
1550 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1551 gen_string_movl_A0_EDI(s);
1552 gen_op_st_T0_A0(ot + s->mem_index);
1553 gen_op_movl_T0_Dshift(ot);
1554 gen_op_add_reg_T0(s->aflag, R_EDI);
1555}
1556
1557#ifndef VBOX
1558static inline void gen_lods(DisasContext *s, int ot)
1559#else /* VBOX */
1560DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1561#endif /* VBOX */
1562{
1563 gen_string_movl_A0_ESI(s);
1564 gen_op_ld_T0_A0(ot + s->mem_index);
1565 gen_op_mov_reg_T0(ot, R_EAX);
1566 gen_op_movl_T0_Dshift(ot);
1567 gen_op_add_reg_T0(s->aflag, R_ESI);
1568}
1569
1570#ifndef VBOX
1571static inline void gen_scas(DisasContext *s, int ot)
1572#else /* VBOX */
1573DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1574#endif /* VBOX */
1575{
1576 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1577 gen_string_movl_A0_EDI(s);
1578 gen_op_ld_T1_A0(ot + s->mem_index);
1579 gen_op_cmpl_T0_T1_cc();
1580 gen_op_movl_T0_Dshift(ot);
1581 gen_op_add_reg_T0(s->aflag, R_EDI);
1582}
1583
1584#ifndef VBOX
1585static inline void gen_cmps(DisasContext *s, int ot)
1586#else /* VBOX */
1587DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1588#endif /* VBOX */
1589{
1590 gen_string_movl_A0_ESI(s);
1591 gen_op_ld_T0_A0(ot + s->mem_index);
1592 gen_string_movl_A0_EDI(s);
1593 gen_op_ld_T1_A0(ot + s->mem_index);
1594 gen_op_cmpl_T0_T1_cc();
1595 gen_op_movl_T0_Dshift(ot);
1596 gen_op_add_reg_T0(s->aflag, R_ESI);
1597 gen_op_add_reg_T0(s->aflag, R_EDI);
1598}
1599
1600#ifndef VBOX
1601static inline void gen_ins(DisasContext *s, int ot)
1602#else /* VBOX */
1603DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1604#endif /* VBOX */
1605{
1606 if (use_icount)
1607 gen_io_start();
1608 gen_string_movl_A0_EDI(s);
1609 /* Note: we must do this dummy write first to be restartable in
1610 case of page fault. */
1611 gen_op_movl_T0_0();
1612 gen_op_st_T0_A0(ot + s->mem_index);
1613 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1614 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1615 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1616 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1617 gen_op_st_T0_A0(ot + s->mem_index);
1618 gen_op_movl_T0_Dshift(ot);
1619 gen_op_add_reg_T0(s->aflag, R_EDI);
1620 if (use_icount)
1621 gen_io_end();
1622}
1623
1624#ifndef VBOX
1625static inline void gen_outs(DisasContext *s, int ot)
1626#else /* VBOX */
1627DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1628#endif /* VBOX */
1629{
1630 if (use_icount)
1631 gen_io_start();
1632 gen_string_movl_A0_ESI(s);
1633 gen_op_ld_T0_A0(ot + s->mem_index);
1634
1635 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1636 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1637 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1638 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1639 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1640
1641 gen_op_movl_T0_Dshift(ot);
1642 gen_op_add_reg_T0(s->aflag, R_ESI);
1643 if (use_icount)
1644 gen_io_end();
1645}
1646
1647/* same method as Valgrind: we generate jumps to current or next
1648 instruction */
1649#ifndef VBOX
1650#define GEN_REPZ(op) \
1651static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1652 target_ulong cur_eip, target_ulong next_eip) \
1653{ \
1654 int l2; \
1655 gen_update_cc_op(s); \
1656 l2 = gen_jz_ecx_string(s, next_eip); \
1657 gen_ ## op(s, ot); \
1658 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1659 /* a loop would cause two single step exceptions if ECX = 1 \
1660 before rep string_insn */ \
1661 if (!s->jmp_opt) \
1662 gen_op_jz_ecx(s->aflag, l2); \
1663 gen_jmp(s, cur_eip); \
1664}
1665#else /* VBOX */
1666#define GEN_REPZ(op) \
1667DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1668 target_ulong cur_eip, target_ulong next_eip) \
1669{ \
1670 int l2; \
1671 gen_update_cc_op(s); \
1672 l2 = gen_jz_ecx_string(s, next_eip); \
1673 gen_ ## op(s, ot); \
1674 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1675 /* a loop would cause two single step exceptions if ECX = 1 \
1676 before rep string_insn */ \
1677 if (!s->jmp_opt) \
1678 gen_op_jz_ecx(s->aflag, l2); \
1679 gen_jmp(s, cur_eip); \
1680}
1681#endif /* VBOX */
1682
1683#ifndef VBOX
1684#define GEN_REPZ2(op) \
1685static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1686 target_ulong cur_eip, \
1687 target_ulong next_eip, \
1688 int nz) \
1689{ \
1690 int l2; \
1691 gen_update_cc_op(s); \
1692 l2 = gen_jz_ecx_string(s, next_eip); \
1693 gen_ ## op(s, ot); \
1694 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1695 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1696 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1697 if (!s->jmp_opt) \
1698 gen_op_jz_ecx(s->aflag, l2); \
1699 gen_jmp(s, cur_eip); \
1700}
1701#else /* VBOX */
1702#define GEN_REPZ2(op) \
1703DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1704 target_ulong cur_eip, \
1705 target_ulong next_eip, \
1706 int nz) \
1707{ \
1708 int l2;\
1709 gen_update_cc_op(s); \
1710 l2 = gen_jz_ecx_string(s, next_eip); \
1711 gen_ ## op(s, ot); \
1712 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1713 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1714 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1715 if (!s->jmp_opt) \
1716 gen_op_jz_ecx(s->aflag, l2); \
1717 gen_jmp(s, cur_eip); \
1718}
1719#endif /* VBOX */
1720
1721GEN_REPZ(movs)
1722GEN_REPZ(stos)
1723GEN_REPZ(lods)
1724GEN_REPZ(ins)
1725GEN_REPZ(outs)
1726GEN_REPZ2(scas)
1727GEN_REPZ2(cmps)
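/* The net effect of one GEN_REPZ expansion, rewritten as self-contained C
   for illustration (repz_movsb_sketch and its flat `mem` array are made up
   here; the real generated code re-enters the same EIP once per element): */
#if 0
#include <stdint.h>
static void repz_movsb_sketch(uint32_t *ecx, uint32_t *esi, uint32_t *edi,
                              uint8_t *mem, int step)
{
    while (*ecx != 0) {         /* gen_jz_ecx_string / gen_op_jz_ecx */
        mem[*edi] = mem[*esi];  /* gen_movs: load at ESI, store at EDI */
        *esi += step;           /* gen_op_add_reg_T0(aflag, R_ESI) */
        *edi += step;           /* gen_op_add_reg_T0(aflag, R_EDI) */
        *ecx -= 1;              /* gen_op_add_reg_im(aflag, R_ECX, -1) */
    }                           /* gen_jmp(s, cur_eip) loops back */
}
#endif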
1728
1729static void *helper_fp_arith_ST0_FT0[8] = {
1730 helper_fadd_ST0_FT0,
1731 helper_fmul_ST0_FT0,
1732 helper_fcom_ST0_FT0,
1733 helper_fcom_ST0_FT0,
1734 helper_fsub_ST0_FT0,
1735 helper_fsubr_ST0_FT0,
1736 helper_fdiv_ST0_FT0,
1737 helper_fdivr_ST0_FT0,
1738};
1739
1740/* NOTE the exception in "r" op ordering */
1741static void *helper_fp_arith_STN_ST0[8] = {
1742 helper_fadd_STN_ST0,
1743 helper_fmul_STN_ST0,
1744 NULL,
1745 NULL,
1746 helper_fsubr_STN_ST0,
1747 helper_fsub_STN_ST0,
1748 helper_fdivr_STN_ST0,
1749 helper_fdiv_STN_ST0,
1750};
1751
1752/* if d == OR_TMP0, it means memory operand (address in A0) */
1753static void gen_op(DisasContext *s1, int op, int ot, int d)
1754{
1755 if (d != OR_TMP0) {
1756 gen_op_mov_TN_reg(ot, 0, d);
1757 } else {
1758 gen_op_ld_T0_A0(ot + s1->mem_index);
1759 }
1760 switch(op) {
1761 case OP_ADCL:
1762 if (s1->cc_op != CC_OP_DYNAMIC)
1763 gen_op_set_cc_op(s1->cc_op);
1764 gen_compute_eflags_c(cpu_tmp4);
1765 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1766 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1767 if (d != OR_TMP0)
1768 gen_op_mov_reg_T0(ot, d);
1769 else
1770 gen_op_st_T0_A0(ot + s1->mem_index);
1771 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1772 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1773 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1774 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1775 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1776 s1->cc_op = CC_OP_DYNAMIC;
1777 break;
1778 case OP_SBBL:
1779 if (s1->cc_op != CC_OP_DYNAMIC)
1780 gen_op_set_cc_op(s1->cc_op);
1781 gen_compute_eflags_c(cpu_tmp4);
1782 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1783 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1784 if (d != OR_TMP0)
1785 gen_op_mov_reg_T0(ot, d);
1786 else
1787 gen_op_st_T0_A0(ot + s1->mem_index);
1788 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1789 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1790 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1791 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1792 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1793 s1->cc_op = CC_OP_DYNAMIC;
1794 break;
1795 case OP_ADDL:
1796 gen_op_addl_T0_T1();
1797 if (d != OR_TMP0)
1798 gen_op_mov_reg_T0(ot, d);
1799 else
1800 gen_op_st_T0_A0(ot + s1->mem_index);
1801 gen_op_update2_cc();
1802 s1->cc_op = CC_OP_ADDB + ot;
1803 break;
1804 case OP_SUBL:
1805 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1806 if (d != OR_TMP0)
1807 gen_op_mov_reg_T0(ot, d);
1808 else
1809 gen_op_st_T0_A0(ot + s1->mem_index);
1810 gen_op_update2_cc();
1811 s1->cc_op = CC_OP_SUBB + ot;
1812 break;
1813 default:
1814 case OP_ANDL:
1815 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1816 if (d != OR_TMP0)
1817 gen_op_mov_reg_T0(ot, d);
1818 else
1819 gen_op_st_T0_A0(ot + s1->mem_index);
1820 gen_op_update1_cc();
1821 s1->cc_op = CC_OP_LOGICB + ot;
1822 break;
1823 case OP_ORL:
1824 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1825 if (d != OR_TMP0)
1826 gen_op_mov_reg_T0(ot, d);
1827 else
1828 gen_op_st_T0_A0(ot + s1->mem_index);
1829 gen_op_update1_cc();
1830 s1->cc_op = CC_OP_LOGICB + ot;
1831 break;
1832 case OP_XORL:
1833 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1834 if (d != OR_TMP0)
1835 gen_op_mov_reg_T0(ot, d);
1836 else
1837 gen_op_st_T0_A0(ot + s1->mem_index);
1838 gen_op_update1_cc();
1839 s1->cc_op = CC_OP_LOGICB + ot;
1840 break;
1841 case OP_CMPL:
1842 gen_op_cmpl_T0_T1_cc();
1843 s1->cc_op = CC_OP_SUBB + ot;
1844 break;
1845 }
1846}
1847
1848/* if d == OR_TMP0, it means memory operand (address in A0) */
1849static void gen_inc(DisasContext *s1, int ot, int d, int c)
1850{
1851 if (d != OR_TMP0)
1852 gen_op_mov_TN_reg(ot, 0, d);
1853 else
1854 gen_op_ld_T0_A0(ot + s1->mem_index);
1855 if (s1->cc_op != CC_OP_DYNAMIC)
1856 gen_op_set_cc_op(s1->cc_op);
1857 if (c > 0) {
1858 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1859 s1->cc_op = CC_OP_INCB + ot;
1860 } else {
1861 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1862 s1->cc_op = CC_OP_DECB + ot;
1863 }
1864 if (d != OR_TMP0)
1865 gen_op_mov_reg_T0(ot, d);
1866 else
1867 gen_op_st_T0_A0(ot + s1->mem_index);
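    /* INC/DEC leave CF untouched: compute the previous carry now and park
       it in cc_src for the CC_OP_INCx/CC_OP_DECx flag evaluators */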
1868 gen_compute_eflags_c(cpu_cc_src);
1869 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1870}
1871
1872static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1873 int is_right, int is_arith)
1874{
1875 target_ulong mask;
1876 int shift_label;
1877 TCGv t0, t1;
1878
1879 if (ot == OT_QUAD)
1880 mask = 0x3f;
1881 else
1882 mask = 0x1f;
1883
1884 /* load */
1885 if (op1 == OR_TMP0)
1886 gen_op_ld_T0_A0(ot + s->mem_index);
1887 else
1888 gen_op_mov_TN_reg(ot, 0, op1);
1889
1890 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1891
1892 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1893
1894 if (is_right) {
1895 if (is_arith) {
1896 gen_exts(ot, cpu_T[0]);
1897 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1898 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1899 } else {
1900 gen_extu(ot, cpu_T[0]);
1901 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1902 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1903 }
1904 } else {
1905 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1906 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1907 }
1908
1909 /* store */
1910 if (op1 == OR_TMP0)
1911 gen_op_st_T0_A0(ot + s->mem_index);
1912 else
1913 gen_op_mov_reg_T0(ot, op1);
1914
1915 /* update eflags if non zero shift */
1916 if (s->cc_op != CC_OP_DYNAMIC)
1917 gen_op_set_cc_op(s->cc_op);
1918
1919 /* XXX: inefficient */
1920 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1921 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1922
1923 tcg_gen_mov_tl(t0, cpu_T[0]);
1924 tcg_gen_mov_tl(t1, cpu_T3);
1925
1926 shift_label = gen_new_label();
1927 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1928
1929 tcg_gen_mov_tl(cpu_cc_src, t1);
1930 tcg_gen_mov_tl(cpu_cc_dst, t0);
1931 if (is_right)
1932 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1933 else
1934 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1935
1936 gen_set_label(shift_label);
1937 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1938
1939 tcg_temp_free(t0);
1940 tcg_temp_free(t1);
1941}
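/* Why the same shift is also done by (count - 1) above: the x86 carry
   after a shift is the last bit shifted out, i.e. the low (or high) bit
   of the value shifted one place less. A standalone check for SHR: */
#if 0
#include <assert.h>
#include <stdint.h>
int main(void)
{
    uint8_t val = 0xb4;                      /* 1011 0100 */
    int count = 3;
    uint8_t res = val >> count;              /* 0001 0110 */
    uint8_t cf  = (val >> (count - 1)) & 1;  /* bit 2 of val */
    assert(res == 0x16 && cf == 1);
    return 0;
}
#endif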
1942
1943static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1944 int is_right, int is_arith)
1945{
1946 int mask;
1947
1948 if (ot == OT_QUAD)
1949 mask = 0x3f;
1950 else
1951 mask = 0x1f;
1952
1953 /* load */
1954 if (op1 == OR_TMP0)
1955 gen_op_ld_T0_A0(ot + s->mem_index);
1956 else
1957 gen_op_mov_TN_reg(ot, 0, op1);
1958
1959 op2 &= mask;
1960 if (op2 != 0) {
1961 if (is_right) {
1962 if (is_arith) {
1963 gen_exts(ot, cpu_T[0]);
1964 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1965 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1966 } else {
1967 gen_extu(ot, cpu_T[0]);
1968 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1969 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1970 }
1971 } else {
1972 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1973 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1974 }
1975 }
1976
1977 /* store */
1978 if (op1 == OR_TMP0)
1979 gen_op_st_T0_A0(ot + s->mem_index);
1980 else
1981 gen_op_mov_reg_T0(ot, op1);
1982
1983 /* update eflags if non zero shift */
1984 if (op2 != 0) {
1985 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1986 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1987 if (is_right)
1988 s->cc_op = CC_OP_SARB + ot;
1989 else
1990 s->cc_op = CC_OP_SHLB + ot;
1991 }
1992}
1993
1994#ifndef VBOX
1995static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1996#else /* VBOX */
1997DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1998#endif /* VBOX */
1999{
2000 if (arg2 >= 0)
2001 tcg_gen_shli_tl(ret, arg1, arg2);
2002 else
2003 tcg_gen_shri_tl(ret, arg1, -arg2);
2004}
2005
2006/* XXX: add faster immediate case */
2007static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2008 int is_right)
2009{
2010 target_ulong mask;
2011 int label1, label2, data_bits;
2012 TCGv t0, t1, t2, a0;
2013
2014 /* XXX: inefficient, but we must use local temps */
2015 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2016 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2017 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2018 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2019
2020 if (ot == OT_QUAD)
2021 mask = 0x3f;
2022 else
2023 mask = 0x1f;
2024
2025 /* load */
2026 if (op1 == OR_TMP0) {
2027 tcg_gen_mov_tl(a0, cpu_A0);
2028 gen_op_ld_v(ot + s->mem_index, t0, a0);
2029 } else {
2030 gen_op_mov_v_reg(ot, t0, op1);
2031 }
2032
2033 tcg_gen_mov_tl(t1, cpu_T[1]);
2034
2035 tcg_gen_andi_tl(t1, t1, mask);
2036
2037 /* Must test zero case to avoid using undefined behaviour in TCG
2038 shifts. */
2039 label1 = gen_new_label();
2040 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2041
2042 if (ot <= OT_WORD)
2043 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2044 else
2045 tcg_gen_mov_tl(cpu_tmp0, t1);
2046
2047 gen_extu(ot, t0);
2048 tcg_gen_mov_tl(t2, t0);
2049
2050 data_bits = 8 << ot;
2051 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2052 fix TCG definition) */
2053 if (is_right) {
2054 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2055 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2056 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2057 } else {
2058 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2059 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2060 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2061 }
2062 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2063
2064 gen_set_label(label1);
2065 /* store */
2066 if (op1 == OR_TMP0) {
2067 gen_op_st_v(ot + s->mem_index, t0, a0);
2068 } else {
2069 gen_op_mov_reg_v(ot, op1, t0);
2070 }
2071
2072 /* update eflags */
2073 if (s->cc_op != CC_OP_DYNAMIC)
2074 gen_op_set_cc_op(s->cc_op);
2075
2076 label2 = gen_new_label();
2077 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2078
2079 gen_compute_eflags(cpu_cc_src);
2080 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2081 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2082 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2083 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2084 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2085 if (is_right) {
2086 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2087 }
2088 tcg_gen_andi_tl(t0, t0, CC_C);
2089 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2090
2091 tcg_gen_discard_tl(cpu_cc_dst);
2092 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2093
2094 gen_set_label(label2);
2095 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2096
2097 tcg_temp_free(t0);
2098 tcg_temp_free(t1);
2099 tcg_temp_free(t2);
2100 tcg_temp_free(a0);
2101}
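/* The rotate above is built from two shifts: rot(x, n) is
   (x >> n) | (x << (bits - n)) for a right rotate, with the explicit
   n == 0 branch avoiding the undefined shift by `bits`. Standalone: */
#if 0
#include <assert.h>
#include <stdint.h>
static uint32_t rotr32(uint32_t x, unsigned n)   /* requires 0 < n < 32 */
{
    return (x >> n) | (x << (32 - n));
}
int main(void)
{
    assert(rotr32(0x80000001u, 1) == 0xc0000000u);
    return 0;
}
#endif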
2102
2103static void *helper_rotc[8] = {
2104 helper_rclb,
2105 helper_rclw,
2106 helper_rcll,
2107 X86_64_ONLY(helper_rclq),
2108 helper_rcrb,
2109 helper_rcrw,
2110 helper_rcrl,
2111 X86_64_ONLY(helper_rcrq),
2112};
2113
2114/* XXX: add faster immediate = 1 case */
2115static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2116 int is_right)
2117{
2118 int label1;
2119
2120 if (s->cc_op != CC_OP_DYNAMIC)
2121 gen_op_set_cc_op(s->cc_op);
2122
2123 /* load */
2124 if (op1 == OR_TMP0)
2125 gen_op_ld_T0_A0(ot + s->mem_index);
2126 else
2127 gen_op_mov_TN_reg(ot, 0, op1);
2128
2129 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2130 cpu_T[0], cpu_T[0], cpu_T[1]);
2131 /* store */
2132 if (op1 == OR_TMP0)
2133 gen_op_st_T0_A0(ot + s->mem_index);
2134 else
2135 gen_op_mov_reg_T0(ot, op1);
2136
2137 /* update eflags */
2138 label1 = gen_new_label();
2139 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2140
2141 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2142 tcg_gen_discard_tl(cpu_cc_dst);
2143 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2144
2145 gen_set_label(label1);
2146 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2147}
2148
2149/* XXX: add faster immediate case */
2150static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2151 int is_right)
2152{
2153 int label1, label2, data_bits;
2154 target_ulong mask;
2155 TCGv t0, t1, t2, a0;
2156
2157 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2158 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2159 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2160 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2161
2162 if (ot == OT_QUAD)
2163 mask = 0x3f;
2164 else
2165 mask = 0x1f;
2166
2167 /* load */
2168 if (op1 == OR_TMP0) {
2169 tcg_gen_mov_tl(a0, cpu_A0);
2170 gen_op_ld_v(ot + s->mem_index, t0, a0);
2171 } else {
2172 gen_op_mov_v_reg(ot, t0, op1);
2173 }
2174
2175 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2176
2177 tcg_gen_mov_tl(t1, cpu_T[1]);
2178 tcg_gen_mov_tl(t2, cpu_T3);
2179
2180 /* Must test zero case to avoid using undefined behaviour in TCG
2181 shifts. */
2182 label1 = gen_new_label();
2183 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2184
2185 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2186 if (ot == OT_WORD) {
2187 /* Note: we implement the Intel behaviour for shift count > 16 */
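 /* Conceptually the 16-bit shrd below operates on the 32-bit
    concatenation (t1:t0), roughly:
      t0 = (((t1 << 16) | t0) >> count) & 0xffff
    with the extra left shift covering counts above 16. */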
2188 if (is_right) {
2189 tcg_gen_andi_tl(t0, t0, 0xffff);
2190 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2191 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2192 tcg_gen_ext32u_tl(t0, t0);
2193
2194 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2195
2196 /* only needed if count > 16, but a runtime test would complicate the generated code */
2197 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2198 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2199
2200 tcg_gen_shr_tl(t0, t0, t2);
2201
2202 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2203 } else {
2204 /* XXX: not optimal */
2205 tcg_gen_andi_tl(t0, t0, 0xffff);
2206 tcg_gen_shli_tl(t1, t1, 16);
2207 tcg_gen_or_tl(t1, t1, t0);
2208 tcg_gen_ext32u_tl(t1, t1);
2209
2210 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2211 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2212 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2213 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2214
2215 tcg_gen_shl_tl(t0, t0, t2);
2216 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2217 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2218 tcg_gen_or_tl(t0, t0, t1);
2219 }
2220 } else {
2221 data_bits = 8 << ot;
2222 if (is_right) {
2223 if (ot == OT_LONG)
2224 tcg_gen_ext32u_tl(t0, t0);
2225
2226 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2227
2228 tcg_gen_shr_tl(t0, t0, t2);
2229 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2230 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2231 tcg_gen_or_tl(t0, t0, t1);
2232
2233 } else {
2234 if (ot == OT_LONG)
2235 tcg_gen_ext32u_tl(t1, t1);
2236
2237 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2238
2239 tcg_gen_shl_tl(t0, t0, t2);
2240 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2241 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2242 tcg_gen_or_tl(t0, t0, t1);
2243 }
2244 }
2245 tcg_gen_mov_tl(t1, cpu_tmp4);
2246
2247 gen_set_label(label1);
2248 /* store */
2249 if (op1 == OR_TMP0) {
2250 gen_op_st_v(ot + s->mem_index, t0, a0);
2251 } else {
2252 gen_op_mov_reg_v(ot, op1, t0);
2253 }
2254
2255 /* update eflags */
2256 if (s->cc_op != CC_OP_DYNAMIC)
2257 gen_op_set_cc_op(s->cc_op);
2258
2259 label2 = gen_new_label();
2260 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2261
2262 tcg_gen_mov_tl(cpu_cc_src, t1);
2263 tcg_gen_mov_tl(cpu_cc_dst, t0);
2264 if (is_right) {
2265 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2266 } else {
2267 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2268 }
2269 gen_set_label(label2);
2270 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2271
2272 tcg_temp_free(t0);
2273 tcg_temp_free(t1);
2274 tcg_temp_free(t2);
2275 tcg_temp_free(a0);
2276}
2277
2278static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2279{
2280 if (s != OR_TMP1)
2281 gen_op_mov_TN_reg(ot, 1, s);
2282 switch(op) {
2283 case OP_ROL:
2284 gen_rot_rm_T1(s1, ot, d, 0);
2285 break;
2286 case OP_ROR:
2287 gen_rot_rm_T1(s1, ot, d, 1);
2288 break;
2289 case OP_SHL:
2290 case OP_SHL1:
2291 gen_shift_rm_T1(s1, ot, d, 0, 0);
2292 break;
2293 case OP_SHR:
2294 gen_shift_rm_T1(s1, ot, d, 1, 0);
2295 break;
2296 case OP_SAR:
2297 gen_shift_rm_T1(s1, ot, d, 1, 1);
2298 break;
2299 case OP_RCL:
2300 gen_rotc_rm_T1(s1, ot, d, 0);
2301 break;
2302 case OP_RCR:
2303 gen_rotc_rm_T1(s1, ot, d, 1);
2304 break;
2305 }
2306}
2307
2308static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2309{
2310 switch(op) {
2311 case OP_SHL:
2312 case OP_SHL1:
2313 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2314 break;
2315 case OP_SHR:
2316 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2317 break;
2318 case OP_SAR:
2319 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2320 break;
2321 default:
2322 /* currently not optimized */
2323 gen_op_movl_T1_im(c);
2324 gen_shift(s1, op, ot, d, OR_TMP1);
2325 break;
2326 }
2327}
2328
2329static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2330{
2331 target_long disp;
2332 int havesib;
2333 int base;
2334 int index;
2335 int scale;
2336 int opreg;
2337 int mod, rm, code, override, must_add_seg;
2338
2339 override = s->override;
2340 must_add_seg = s->addseg;
2341 if (override >= 0)
2342 must_add_seg = 1;
2343 mod = (modrm >> 6) & 3;
2344 rm = modrm & 7;
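/* ModRM layout: mod (bits 7:6) | reg (5:3) | rm (2:0). With 32/64-bit
   addressing, rm == 4 announces a trailing SIB byte laid out as
   scale (7:6) | index (5:3) | base (2:0). */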
2345
2346 if (s->aflag) {
2347
2348 havesib = 0;
2349 base = rm;
2350 index = 0;
2351 scale = 0;
2352
2353 if (base == 4) {
2354 havesib = 1;
2355 code = ldub_code(s->pc++);
2356 scale = (code >> 6) & 3;
2357 index = ((code >> 3) & 7) | REX_X(s);
2358 base = (code & 7);
2359 }
2360 base |= REX_B(s);
2361
2362 switch (mod) {
2363 case 0:
2364 if ((base & 7) == 5) {
2365 base = -1;
2366 disp = (int32_t)ldl_code(s->pc);
2367 s->pc += 4;
2368 if (CODE64(s) && !havesib) {
2369 disp += s->pc + s->rip_offset;
2370 }
2371 } else {
2372 disp = 0;
2373 }
2374 break;
2375 case 1:
2376 disp = (int8_t)ldub_code(s->pc++);
2377 break;
2378 default:
2379 case 2:
2380#ifdef VBOX
2381 disp = (int32_t)ldl_code(s->pc);
2382#else
2383 disp = ldl_code(s->pc);
2384#endif
2385 s->pc += 4;
2386 break;
2387 }
2388
2389 if (base >= 0) {
2390 /* for correct popl handling with esp */
2391 if (base == 4 && s->popl_esp_hack)
2392 disp += s->popl_esp_hack;
2393#ifdef TARGET_X86_64
2394 if (s->aflag == 2) {
2395 gen_op_movq_A0_reg(base);
2396 if (disp != 0) {
2397 gen_op_addq_A0_im(disp);
2398 }
2399 } else
2400#endif
2401 {
2402 gen_op_movl_A0_reg(base);
2403 if (disp != 0)
2404 gen_op_addl_A0_im(disp);
2405 }
2406 } else {
2407#ifdef TARGET_X86_64
2408 if (s->aflag == 2) {
2409 gen_op_movq_A0_im(disp);
2410 } else
2411#endif
2412 {
2413 gen_op_movl_A0_im(disp);
2414 }
2415 }
2416 /* XXX: index == 4 is always invalid */
2417 if (havesib && (index != 4 || scale != 0)) {
2418#ifdef TARGET_X86_64
2419 if (s->aflag == 2) {
2420 gen_op_addq_A0_reg_sN(scale, index);
2421 } else
2422#endif
2423 {
2424 gen_op_addl_A0_reg_sN(scale, index);
2425 }
2426 }
2427 if (must_add_seg) {
2428 if (override < 0) {
2429 if (base == R_EBP || base == R_ESP)
2430 override = R_SS;
2431 else
2432 override = R_DS;
2433 }
2434#ifdef TARGET_X86_64
2435 if (s->aflag == 2) {
2436 gen_op_addq_A0_seg(override);
2437 } else
2438#endif
2439 {
2440 gen_op_addl_A0_seg(override);
2441 }
2442 }
2443 } else {
2444 switch (mod) {
2445 case 0:
2446 if (rm == 6) {
2447 disp = lduw_code(s->pc);
2448 s->pc += 2;
2449 gen_op_movl_A0_im(disp);
2450 rm = 0; /* avoid SS override */
2451 goto no_rm;
2452 } else {
2453 disp = 0;
2454 }
2455 break;
2456 case 1:
2457 disp = (int8_t)ldub_code(s->pc++);
2458 break;
2459 default:
2460 case 2:
2461 disp = lduw_code(s->pc);
2462 s->pc += 2;
2463 break;
2464 }
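 /* 16-bit address modes: rm selects BX+SI, BX+DI, BP+SI, BP+DI,
    SI, DI, BP and BX respectively. */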
2465 switch(rm) {
2466 case 0:
2467 gen_op_movl_A0_reg(R_EBX);
2468 gen_op_addl_A0_reg_sN(0, R_ESI);
2469 break;
2470 case 1:
2471 gen_op_movl_A0_reg(R_EBX);
2472 gen_op_addl_A0_reg_sN(0, R_EDI);
2473 break;
2474 case 2:
2475 gen_op_movl_A0_reg(R_EBP);
2476 gen_op_addl_A0_reg_sN(0, R_ESI);
2477 break;
2478 case 3:
2479 gen_op_movl_A0_reg(R_EBP);
2480 gen_op_addl_A0_reg_sN(0, R_EDI);
2481 break;
2482 case 4:
2483 gen_op_movl_A0_reg(R_ESI);
2484 break;
2485 case 5:
2486 gen_op_movl_A0_reg(R_EDI);
2487 break;
2488 case 6:
2489 gen_op_movl_A0_reg(R_EBP);
2490 break;
2491 default:
2492 case 7:
2493 gen_op_movl_A0_reg(R_EBX);
2494 break;
2495 }
2496 if (disp != 0)
2497 gen_op_addl_A0_im(disp);
2498 gen_op_andl_A0_ffff();
2499 no_rm:
2500 if (must_add_seg) {
2501 if (override < 0) {
2502 if (rm == 2 || rm == 3 || rm == 6)
2503 override = R_SS;
2504 else
2505 override = R_DS;
2506 }
2507 gen_op_addl_A0_seg(override);
2508 }
2509 }
2510
2511 opreg = OR_A0;
2512 disp = 0;
2513 *reg_ptr = opreg;
2514 *offset_ptr = disp;
2515}
2516
2517static void gen_nop_modrm(DisasContext *s, int modrm)
2518{
2519 int mod, rm, base, code;
2520
2521 mod = (modrm >> 6) & 3;
2522 if (mod == 3)
2523 return;
2524 rm = modrm & 7;
2525
2526 if (s->aflag) {
2527
2528 base = rm;
2529
2530 if (base == 4) {
2531 code = ldub_code(s->pc++);
2532 base = (code & 7);
2533 }
2534
2535 switch (mod) {
2536 case 0:
2537 if (base == 5) {
2538 s->pc += 4;
2539 }
2540 break;
2541 case 1:
2542 s->pc++;
2543 break;
2544 default:
2545 case 2:
2546 s->pc += 4;
2547 break;
2548 }
2549 } else {
2550 switch (mod) {
2551 case 0:
2552 if (rm == 6) {
2553 s->pc += 2;
2554 }
2555 break;
2556 case 1:
2557 s->pc++;
2558 break;
2559 default:
2560 case 2:
2561 s->pc += 2;
2562 break;
2563 }
2564 }
2565}
2566
2567/* used for LEA and MOV AX, mem */
2568static void gen_add_A0_ds_seg(DisasContext *s)
2569{
2570 int override, must_add_seg;
2571 must_add_seg = s->addseg;
2572 override = R_DS;
2573 if (s->override >= 0) {
2574 override = s->override;
2575 must_add_seg = 1;
2576 }
2579 if (must_add_seg) {
2580#ifdef TARGET_X86_64
2581 if (CODE64(s)) {
2582 gen_op_addq_A0_seg(override);
2583 } else
2584#endif
2585 {
2586 gen_op_addl_A0_seg(override);
2587 }
2588 }
2589}
2590
2591/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2592 OR_TMP0 */
2593static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2594{
2595 int mod, rm, opreg, disp;
2596
2597 mod = (modrm >> 6) & 3;
2598 rm = (modrm & 7) | REX_B(s);
2599 if (mod == 3) {
2600 if (is_store) {
2601 if (reg != OR_TMP0)
2602 gen_op_mov_TN_reg(ot, 0, reg);
2603 gen_op_mov_reg_T0(ot, rm);
2604 } else {
2605 gen_op_mov_TN_reg(ot, 0, rm);
2606 if (reg != OR_TMP0)
2607 gen_op_mov_reg_T0(ot, reg);
2608 }
2609 } else {
2610 gen_lea_modrm(s, modrm, &opreg, &disp);
2611 if (is_store) {
2612 if (reg != OR_TMP0)
2613 gen_op_mov_TN_reg(ot, 0, reg);
2614 gen_op_st_T0_A0(ot + s->mem_index);
2615 } else {
2616 gen_op_ld_T0_A0(ot + s->mem_index);
2617 if (reg != OR_TMP0)
2618 gen_op_mov_reg_T0(ot, reg);
2619 }
2620 }
2621}
2622
2623#ifndef VBOX
2624static inline uint32_t insn_get(DisasContext *s, int ot)
2625#else /* VBOX */
2626DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2627#endif /* VBOX */
2628{
2629 uint32_t ret;
2630
2631 switch(ot) {
2632 case OT_BYTE:
2633 ret = ldub_code(s->pc);
2634 s->pc++;
2635 break;
2636 case OT_WORD:
2637 ret = lduw_code(s->pc);
2638 s->pc += 2;
2639 break;
2640 default:
2641 case OT_LONG:
2642 ret = ldl_code(s->pc);
2643 s->pc += 4;
2644 break;
2645 }
2646 return ret;
2647}
2648
2649#ifndef VBOX
2650static inline int insn_const_size(unsigned int ot)
2651#else /* VBOX */
2652DECLINLINE(int) insn_const_size(unsigned int ot)
2653#endif /* VBOX */
2654{
2655 if (ot <= OT_LONG)
2656 return 1 << ot;
2657 else
2658 return 4;
2659}
2660
2661#ifndef VBOX
2662static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2663#else /* VBOX */
2664DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2665#endif /* VBOX */
2666{
2667 TranslationBlock *tb;
2668 target_ulong pc;
2669
2670 pc = s->cs_base + eip;
2671 tb = s->tb;
2672 /* NOTE: we handle the case where the TB spans two pages here */
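 /* A direct jump is only safe while the target stays on a page this TB
    already covers: invalidating either page then also unlinks the
    chained jump. */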
2673 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2674 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2675#ifdef VBOX
2676 gen_check_external_event(s);
2677#endif /* VBOX */
2678 /* jump to same page: we can use a direct jump */
2679 tcg_gen_goto_tb(tb_num);
2680 gen_jmp_im(eip);
2681 tcg_gen_exit_tb((long)tb + tb_num);
2682 } else {
2683 /* jump to another page: currently not optimized */
2684 gen_jmp_im(eip);
2685 gen_eob(s);
2686 }
2687}
2688
2689#ifndef VBOX
2690static inline void gen_jcc(DisasContext *s, int b,
2691#else /* VBOX */
2692DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2693#endif /* VBOX */
2694 target_ulong val, target_ulong next_eip)
2695{
2696 int l1, l2, cc_op;
2697
2698 cc_op = s->cc_op;
2699 if (s->cc_op != CC_OP_DYNAMIC) {
2700 gen_op_set_cc_op(s->cc_op);
2701 s->cc_op = CC_OP_DYNAMIC;
2702 }
2703 if (s->jmp_opt) {
2704 l1 = gen_new_label();
2705 gen_jcc1(s, cc_op, b, l1);
2706
2707 gen_goto_tb(s, 0, next_eip);
2708
2709 gen_set_label(l1);
2710 gen_goto_tb(s, 1, val);
2711 s->is_jmp = 3;
2712 } else {
2713
2714 l1 = gen_new_label();
2715 l2 = gen_new_label();
2716 gen_jcc1(s, cc_op, b, l1);
2717
2718 gen_jmp_im(next_eip);
2719 tcg_gen_br(l2);
2720
2721 gen_set_label(l1);
2722 gen_jmp_im(val);
2723 gen_set_label(l2);
2724 gen_eob(s);
2725 }
2726}
2727
2728static void gen_setcc(DisasContext *s, int b)
2729{
2730 int inv, jcc_op, l1;
2731 TCGv t0;
2732
2733 if (is_fast_jcc_case(s, b)) {
2734 /* nominal case: we use a jump */
2735 /* XXX: make it faster by adding new instructions in TCG */
2736 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2737 tcg_gen_movi_tl(t0, 0);
2738 l1 = gen_new_label();
2739 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2740 tcg_gen_movi_tl(t0, 1);
2741 gen_set_label(l1);
2742 tcg_gen_mov_tl(cpu_T[0], t0);
2743 tcg_temp_free(t0);
2744 } else {
2745 /* slow case: it is more efficient not to generate a jump,
2746 although it is questionable whether this optimization is
2747 worthwhile */
2748 inv = b & 1;
2749 jcc_op = (b >> 1) & 7;
2750 gen_setcc_slow_T0(s, jcc_op);
2751 if (inv) {
2752 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2753 }
2754 }
2755}
2756
2757#ifndef VBOX
2758static inline void gen_op_movl_T0_seg(int seg_reg)
2759#else /* VBOX */
2760DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2761#endif /* VBOX */
2762{
2763 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2764 offsetof(CPUX86State,segs[seg_reg].selector));
2765}
2766
2767#ifndef VBOX
2768static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2769#else /* VBOX */
2770DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2771#endif /* VBOX */
2772{
2773 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2774 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2775 offsetof(CPUX86State,segs[seg_reg].selector));
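 /* Real/VM86 mode: the base is simply selector * 16, e.g. loading
    0x1234 sets the base to 0x12340. */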
2776 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2777 tcg_gen_st_tl(cpu_T[0], cpu_env,
2778 offsetof(CPUX86State,segs[seg_reg].base));
2779#ifdef VBOX
2780 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2781 if (seg_reg == R_CS)
2782 flags |= DESC_CS_MASK;
2783 gen_op_movl_T0_im(flags);
2784 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2785
2786 /* Set the limit to 0xffff. */
2787 gen_op_movl_T0_im(0xffff);
2788 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2789#endif
2790}
2791
2792/* move T0 to seg_reg and compute if the CPU state may change. Never
2793 call this function with seg_reg == R_CS */
2794static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2795{
2796 if (s->pe && !s->vm86) {
2797 /* XXX: optimize by finding processor state dynamically */
2798 if (s->cc_op != CC_OP_DYNAMIC)
2799 gen_op_set_cc_op(s->cc_op);
2800 gen_jmp_im(cur_eip);
2801 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2802 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2803 /* abort translation because the addseg value may change or
2804 because ss32 may change. For R_SS, translation must always
2805 stop as a special handling must be done to disable hardware
2806 interrupts for the next instruction */
2807 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2808 s->is_jmp = 3;
2809 } else {
2810 gen_op_movl_seg_T0_vm(seg_reg);
2811 if (seg_reg == R_SS)
2812 s->is_jmp = 3;
2813 }
2814}
2815
2816#ifndef VBOX
2817static inline int svm_is_rep(int prefixes)
2818#else /* VBOX */
2819DECLINLINE(int) svm_is_rep(int prefixes)
2820#endif /* VBOX */
2821{
2822 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2823}
2824
2825#ifndef VBOX
2826static inline void
2827#else /* VBOX */
2828DECLINLINE(void)
2829#endif /* VBOX */
2830gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2831 uint32_t type, uint64_t param)
2832{
2833 /* no SVM activated; fast case */
2834 if (likely(!(s->flags & HF_SVMI_MASK)))
2835 return;
2836 if (s->cc_op != CC_OP_DYNAMIC)
2837 gen_op_set_cc_op(s->cc_op);
2838 gen_jmp_im(pc_start - s->cs_base);
2839 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2840 tcg_const_i32(type), tcg_const_i64(param));
2841}
2842
2843#ifndef VBOX
2844static inline void
2845#else /* VBOX */
2846DECLINLINE(void)
2847#endif
2848gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2849{
2850 gen_svm_check_intercept_param(s, pc_start, type, 0);
2851}
2852
2853#ifndef VBOX
2854static inline void gen_stack_update(DisasContext *s, int addend)
2855#else /* VBOX */
2856DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2857#endif /* VBOX */
2858{
2859#ifdef TARGET_X86_64
2860 if (CODE64(s)) {
2861 gen_op_add_reg_im(2, R_ESP, addend);
2862 } else
2863#endif
2864 if (s->ss32) {
2865 gen_op_add_reg_im(1, R_ESP, addend);
2866 } else {
2867 gen_op_add_reg_im(0, R_ESP, addend);
2868 }
2869}
2870
2871/* generate a push. It depends on ss32, addseg and dflag */
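/* Sketch of the common 32-bit case: compute A0 = ESP - 4, store T0 at
   SS:A0, then commit ESP, so that a faulting store leaves ESP intact. */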
2872static void gen_push_T0(DisasContext *s)
2873{
2874#ifdef TARGET_X86_64
2875 if (CODE64(s)) {
2876 gen_op_movq_A0_reg(R_ESP);
2877 if (s->dflag) {
2878 gen_op_addq_A0_im(-8);
2879 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2880 } else {
2881 gen_op_addq_A0_im(-2);
2882 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2883 }
2884 gen_op_mov_reg_A0(2, R_ESP);
2885 } else
2886#endif
2887 {
2888 gen_op_movl_A0_reg(R_ESP);
2889 if (!s->dflag)
2890 gen_op_addl_A0_im(-2);
2891 else
2892 gen_op_addl_A0_im(-4);
2893 if (s->ss32) {
2894 if (s->addseg) {
2895 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2896 gen_op_addl_A0_seg(R_SS);
2897 }
2898 } else {
2899 gen_op_andl_A0_ffff();
2900 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2901 gen_op_addl_A0_seg(R_SS);
2902 }
2903 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2904 if (s->ss32 && !s->addseg)
2905 gen_op_mov_reg_A0(1, R_ESP);
2906 else
2907 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2908 }
2909}
2910
2911/* generate a push. It depends on ss32, addseg and dflag */
2912/* slower version for T1, only used for call Ev */
2913static void gen_push_T1(DisasContext *s)
2914{
2915#ifdef TARGET_X86_64
2916 if (CODE64(s)) {
2917 gen_op_movq_A0_reg(R_ESP);
2918 if (s->dflag) {
2919 gen_op_addq_A0_im(-8);
2920 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2921 } else {
2922 gen_op_addq_A0_im(-2);
2923 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2924 }
2925 gen_op_mov_reg_A0(2, R_ESP);
2926 } else
2927#endif
2928 {
2929 gen_op_movl_A0_reg(R_ESP);
2930 if (!s->dflag)
2931 gen_op_addl_A0_im(-2);
2932 else
2933 gen_op_addl_A0_im(-4);
2934 if (s->ss32) {
2935 if (s->addseg) {
2936 gen_op_addl_A0_seg(R_SS);
2937 }
2938 } else {
2939 gen_op_andl_A0_ffff();
2940 gen_op_addl_A0_seg(R_SS);
2941 }
2942 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2943
2944 if (s->ss32 && !s->addseg)
2945 gen_op_mov_reg_A0(1, R_ESP);
2946 else
2947 gen_stack_update(s, (-2) << s->dflag);
2948 }
2949}
2950
2951/* two step pop is necessary for precise exceptions */
2952static void gen_pop_T0(DisasContext *s)
2953{
2954#ifdef TARGET_X86_64
2955 if (CODE64(s)) {
2956 gen_op_movq_A0_reg(R_ESP);
2957 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2958 } else
2959#endif
2960 {
2961 gen_op_movl_A0_reg(R_ESP);
2962 if (s->ss32) {
2963 if (s->addseg)
2964 gen_op_addl_A0_seg(R_SS);
2965 } else {
2966 gen_op_andl_A0_ffff();
2967 gen_op_addl_A0_seg(R_SS);
2968 }
2969 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2970 }
2971}
2972
2973static void gen_pop_update(DisasContext *s)
2974{
2975#ifdef TARGET_X86_64
2976 if (CODE64(s) && s->dflag) {
2977 gen_stack_update(s, 8);
2978 } else
2979#endif
2980 {
2981 gen_stack_update(s, 2 << s->dflag);
2982 }
2983}
2984
2985static void gen_stack_A0(DisasContext *s)
2986{
2987 gen_op_movl_A0_reg(R_ESP);
2988 if (!s->ss32)
2989 gen_op_andl_A0_ffff();
2990 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2991 if (s->addseg)
2992 gen_op_addl_A0_seg(R_SS);
2993}
2994
2995/* NOTE: wrap around in 16 bit not fully handled */
2996static void gen_pusha(DisasContext *s)
2997{
2998 int i;
2999 gen_op_movl_A0_reg(R_ESP);
3000 gen_op_addl_A0_im(-16 << s->dflag);
3001 if (!s->ss32)
3002 gen_op_andl_A0_ffff();
3003 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3004 if (s->addseg)
3005 gen_op_addl_A0_seg(R_SS);
3006 for(i = 0;i < 8; i++) {
3007 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3008 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3009 gen_op_addl_A0_im(2 << s->dflag);
3010 }
3011 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3012}
3013
3014/* NOTE: wrap around in 16 bit not fully handled */
3015static void gen_popa(DisasContext *s)
3016{
3017 int i;
3018 gen_op_movl_A0_reg(R_ESP);
3019 if (!s->ss32)
3020 gen_op_andl_A0_ffff();
3021 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3022 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3023 if (s->addseg)
3024 gen_op_addl_A0_seg(R_SS);
3025 for(i = 0;i < 8; i++) {
3026 /* ESP is not reloaded */
3027 if (i != 3) {
3028 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3029 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3030 }
3031 gen_op_addl_A0_im(2 << s->dflag);
3032 }
3033 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3034}
3035
3036static void gen_enter(DisasContext *s, int esp_addend, int level)
3037{
3038 int ot, opsize;
3039
3040 level &= 0x1f;
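 /* Rough model of enter N, L: push (E)BP; copy the L - 1 enclosing
    frame pointers (done in helper_enter_level); (E)BP = address of the
    saved (E)BP; (E)SP = (E)BP - L * opsize - N. */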
3041#ifdef TARGET_X86_64
3042 if (CODE64(s)) {
3043 ot = s->dflag ? OT_QUAD : OT_WORD;
3044 opsize = 1 << ot;
3045
3046 gen_op_movl_A0_reg(R_ESP);
3047 gen_op_addq_A0_im(-opsize);
3048 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3049
3050 /* push bp */
3051 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3052 gen_op_st_T0_A0(ot + s->mem_index);
3053 if (level) {
3054 /* XXX: must save state */
3055 tcg_gen_helper_0_3(helper_enter64_level,
3056 tcg_const_i32(level),
3057 tcg_const_i32((ot == OT_QUAD)),
3058 cpu_T[1]);
3059 }
3060 gen_op_mov_reg_T1(ot, R_EBP);
3061 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3062 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3063 } else
3064#endif
3065 {
3066 ot = s->dflag + OT_WORD;
3067 opsize = 2 << s->dflag;
3068
3069 gen_op_movl_A0_reg(R_ESP);
3070 gen_op_addl_A0_im(-opsize);
3071 if (!s->ss32)
3072 gen_op_andl_A0_ffff();
3073 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3074 if (s->addseg)
3075 gen_op_addl_A0_seg(R_SS);
3076 /* push bp */
3077 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3078 gen_op_st_T0_A0(ot + s->mem_index);
3079 if (level) {
3080 /* XXX: must save state */
3081 tcg_gen_helper_0_3(helper_enter_level,
3082 tcg_const_i32(level),
3083 tcg_const_i32(s->dflag),
3084 cpu_T[1]);
3085 }
3086 gen_op_mov_reg_T1(ot, R_EBP);
3087 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3088 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3089 }
3090}
3091
3092static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3093{
3094 if (s->cc_op != CC_OP_DYNAMIC)
3095 gen_op_set_cc_op(s->cc_op);
3096 gen_jmp_im(cur_eip);
3097 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3098 s->is_jmp = 3;
3099}
3100
3101/* an interrupt is different from an exception because of the
3102 privilege checks */
3103static void gen_interrupt(DisasContext *s, int intno,
3104 target_ulong cur_eip, target_ulong next_eip)
3105{
3106 if (s->cc_op != CC_OP_DYNAMIC)
3107 gen_op_set_cc_op(s->cc_op);
3108 gen_jmp_im(cur_eip);
3109 tcg_gen_helper_0_2(helper_raise_interrupt,
3110 tcg_const_i32(intno),
3111 tcg_const_i32(next_eip - cur_eip));
3112 s->is_jmp = 3;
3113}
3114
3115static void gen_debug(DisasContext *s, target_ulong cur_eip)
3116{
3117 if (s->cc_op != CC_OP_DYNAMIC)
3118 gen_op_set_cc_op(s->cc_op);
3119 gen_jmp_im(cur_eip);
3120 tcg_gen_helper_0_0(helper_debug);
3121 s->is_jmp = 3;
3122}
3123
3124/* generate a generic end of block. Trace exception is also generated
3125 if needed */
3126static void gen_eob(DisasContext *s)
3127{
3128#ifdef VBOX
3129 gen_check_external_event(s);
3130#endif /* VBOX */
3131 if (s->cc_op != CC_OP_DYNAMIC)
3132 gen_op_set_cc_op(s->cc_op);
3133 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3134 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3135 }
3136 if (s->singlestep_enabled) {
3137 tcg_gen_helper_0_0(helper_debug);
3138 } else if (s->tf) {
3139 tcg_gen_helper_0_0(helper_single_step);
3140 } else {
3141 tcg_gen_exit_tb(0);
3142 }
3143 s->is_jmp = 3;
3144}
3145
3146 /* generate a jump to eip. No segment change may happen beforehand, as a
3147 direct call to the next block may occur */
3148static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3149{
3150 if (s->jmp_opt) {
3151 if (s->cc_op != CC_OP_DYNAMIC) {
3152 gen_op_set_cc_op(s->cc_op);
3153 s->cc_op = CC_OP_DYNAMIC;
3154 }
3155 gen_goto_tb(s, tb_num, eip);
3156 s->is_jmp = 3;
3157 } else {
3158 gen_jmp_im(eip);
3159 gen_eob(s);
3160 }
3161}
3162
3163static void gen_jmp(DisasContext *s, target_ulong eip)
3164{
3165 gen_jmp_tb(s, eip, 0);
3166}
3167
3168#ifndef VBOX
3169static inline void gen_ldq_env_A0(int idx, int offset)
3170#else /* VBOX */
3171DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3172#endif /* VBOX */
3173{
3174 int mem_index = (idx >> 2) - 1;
3175 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3176 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3177}
3178
3179#ifndef VBOX
3180static inline void gen_stq_env_A0(int idx, int offset)
3181#else /* VBOX */
3182DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3183#endif /* VBOX */
3184{
3185 int mem_index = (idx >> 2) - 1;
3186 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3187 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3188}
3189
3190#ifndef VBOX
3191static inline void gen_ldo_env_A0(int idx, int offset)
3192#else /* VBOX */
3193DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3194#endif /* VBOX */
3195{
3196 int mem_index = (idx >> 2) - 1;
3197 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3198 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3199 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3200 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3201 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3202}
3203
3204#ifndef VBOX
3205static inline void gen_sto_env_A0(int idx, int offset)
3206#else /* VBOX */
3207DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3208#endif /* VBOX */
3209{
3210 int mem_index = (idx >> 2) - 1;
3211 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3212 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3213 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3214 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3215 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3216}
3217
3218#ifndef VBOX
3219static inline void gen_op_movo(int d_offset, int s_offset)
3220#else /* VBOX */
3221DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3222#endif /* VBOX */
3223{
3224 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3225 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3226 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3227 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3228}
3229
3230#ifndef VBOX
3231static inline void gen_op_movq(int d_offset, int s_offset)
3232#else /* VBOX */
3233DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3234#endif /* VBOX */
3235{
3236 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3237 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3238}
3239
3240#ifndef VBOX
3241static inline void gen_op_movl(int d_offset, int s_offset)
3242#else /* VBOX */
3243DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3244#endif /* VBOX */
3245{
3246 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3247 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3248}
3249
3250#ifndef VBOX
3251static inline void gen_op_movq_env_0(int d_offset)
3252#else /* VBOX */
3253DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3254#endif /* VBOX */
3255{
3256 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3257 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3258}
3259
3260#define SSE_SPECIAL ((void *)1)
3261#define SSE_DUMMY ((void *)2)
3262
3263#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3264#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3265 helper_ ## x ## ss, helper_ ## x ## sd, }
3266
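/* sse_op_table1 is indexed as [opcode][b1], where b1 reflects the
   mandatory prefix chosen in gen_sse(): 0 = none, 1 = 0x66, 2 = 0xf3,
   3 = 0xf2. */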
3267static void *sse_op_table1[256][4] = {
3268 /* 3DNow! extensions */
3269 [0x0e] = { SSE_DUMMY }, /* femms */
3270 [0x0f] = { SSE_DUMMY }, /* pf... */
3271 /* pure SSE operations */
3272 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3273 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3274 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3275 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3276 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3277 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3278 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3279 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3280
3281 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3282 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3283 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3284 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3285 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3286 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3287 [0x2e] = { helper_ucomiss, helper_ucomisd },
3288 [0x2f] = { helper_comiss, helper_comisd },
3289 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3290 [0x51] = SSE_FOP(sqrt),
3291 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3292 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3293 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3294 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3295 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3296 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3297 [0x58] = SSE_FOP(add),
3298 [0x59] = SSE_FOP(mul),
3299 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3300 helper_cvtss2sd, helper_cvtsd2ss },
3301 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3302 [0x5c] = SSE_FOP(sub),
3303 [0x5d] = SSE_FOP(min),
3304 [0x5e] = SSE_FOP(div),
3305 [0x5f] = SSE_FOP(max),
3306
3307 [0xc2] = SSE_FOP(cmpeq),
3308 [0xc6] = { helper_shufps, helper_shufpd },
3309
3310 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3311 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3312
3313 /* MMX ops and their SSE extensions */
3314 [0x60] = MMX_OP2(punpcklbw),
3315 [0x61] = MMX_OP2(punpcklwd),
3316 [0x62] = MMX_OP2(punpckldq),
3317 [0x63] = MMX_OP2(packsswb),
3318 [0x64] = MMX_OP2(pcmpgtb),
3319 [0x65] = MMX_OP2(pcmpgtw),
3320 [0x66] = MMX_OP2(pcmpgtl),
3321 [0x67] = MMX_OP2(packuswb),
3322 [0x68] = MMX_OP2(punpckhbw),
3323 [0x69] = MMX_OP2(punpckhwd),
3324 [0x6a] = MMX_OP2(punpckhdq),
3325 [0x6b] = MMX_OP2(packssdw),
3326 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3327 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3328 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3329 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3330 [0x70] = { helper_pshufw_mmx,
3331 helper_pshufd_xmm,
3332 helper_pshufhw_xmm,
3333 helper_pshuflw_xmm },
3334 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3335 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3336 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3337 [0x74] = MMX_OP2(pcmpeqb),
3338 [0x75] = MMX_OP2(pcmpeqw),
3339 [0x76] = MMX_OP2(pcmpeql),
3340 [0x77] = { SSE_DUMMY }, /* emms */
3341 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3342 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3343 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3344 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3345 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3346 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3347 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3348 [0xd1] = MMX_OP2(psrlw),
3349 [0xd2] = MMX_OP2(psrld),
3350 [0xd3] = MMX_OP2(psrlq),
3351 [0xd4] = MMX_OP2(paddq),
3352 [0xd5] = MMX_OP2(pmullw),
3353 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3354 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3355 [0xd8] = MMX_OP2(psubusb),
3356 [0xd9] = MMX_OP2(psubusw),
3357 [0xda] = MMX_OP2(pminub),
3358 [0xdb] = MMX_OP2(pand),
3359 [0xdc] = MMX_OP2(paddusb),
3360 [0xdd] = MMX_OP2(paddusw),
3361 [0xde] = MMX_OP2(pmaxub),
3362 [0xdf] = MMX_OP2(pandn),
3363 [0xe0] = MMX_OP2(pavgb),
3364 [0xe1] = MMX_OP2(psraw),
3365 [0xe2] = MMX_OP2(psrad),
3366 [0xe3] = MMX_OP2(pavgw),
3367 [0xe4] = MMX_OP2(pmulhuw),
3368 [0xe5] = MMX_OP2(pmulhw),
3369 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3370 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3371 [0xe8] = MMX_OP2(psubsb),
3372 [0xe9] = MMX_OP2(psubsw),
3373 [0xea] = MMX_OP2(pminsw),
3374 [0xeb] = MMX_OP2(por),
3375 [0xec] = MMX_OP2(paddsb),
3376 [0xed] = MMX_OP2(paddsw),
3377 [0xee] = MMX_OP2(pmaxsw),
3378 [0xef] = MMX_OP2(pxor),
3379 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3380 [0xf1] = MMX_OP2(psllw),
3381 [0xf2] = MMX_OP2(pslld),
3382 [0xf3] = MMX_OP2(psllq),
3383 [0xf4] = MMX_OP2(pmuludq),
3384 [0xf5] = MMX_OP2(pmaddwd),
3385 [0xf6] = MMX_OP2(psadbw),
3386 [0xf7] = MMX_OP2(maskmov),
3387 [0xf8] = MMX_OP2(psubb),
3388 [0xf9] = MMX_OP2(psubw),
3389 [0xfa] = MMX_OP2(psubl),
3390 [0xfb] = MMX_OP2(psubq),
3391 [0xfc] = MMX_OP2(paddb),
3392 [0xfd] = MMX_OP2(paddw),
3393 [0xfe] = MMX_OP2(paddl),
3394};
3395
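/* Shift-by-immediate group table for opcodes 0x71/0x72/0x73, indexed
   in gen_sse() as ((b - 1) & 3) * 8 + reg, i.e. one row of eight per
   opcode. */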
3396static void *sse_op_table2[3 * 8][2] = {
3397 [0 + 2] = MMX_OP2(psrlw),
3398 [0 + 4] = MMX_OP2(psraw),
3399 [0 + 6] = MMX_OP2(psllw),
3400 [8 + 2] = MMX_OP2(psrld),
3401 [8 + 4] = MMX_OP2(psrad),
3402 [8 + 6] = MMX_OP2(pslld),
3403 [16 + 2] = MMX_OP2(psrlq),
3404 [16 + 3] = { NULL, helper_psrldq_xmm },
3405 [16 + 6] = MMX_OP2(psllq),
3406 [16 + 7] = { NULL, helper_pslldq_xmm },
3407};
3408
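/* Scalar converts in groups of four (ss, sd and the 64-bit sq forms);
   gen_sse() indexes this as (dflag == 2) * 2 + ((b >> 8) - 2), plus an
   offset of 0, 4 or 8 for cvtsi2*, cvtt*2si and cvt*2si respectively. */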
3409static void *sse_op_table3[4 * 3] = {
3410 helper_cvtsi2ss,
3411 helper_cvtsi2sd,
3412 X86_64_ONLY(helper_cvtsq2ss),
3413 X86_64_ONLY(helper_cvtsq2sd),
3414
3415 helper_cvttss2si,
3416 helper_cvttsd2si,
3417 X86_64_ONLY(helper_cvttss2sq),
3418 X86_64_ONLY(helper_cvttsd2sq),
3419
3420 helper_cvtss2si,
3421 helper_cvtsd2si,
3422 X86_64_ONLY(helper_cvtss2sq),
3423 X86_64_ONLY(helper_cvtsd2sq),
3424};
3425
3426static void *sse_op_table4[8][4] = {
3427 SSE_FOP(cmpeq),
3428 SSE_FOP(cmplt),
3429 SSE_FOP(cmple),
3430 SSE_FOP(cmpunord),
3431 SSE_FOP(cmpneq),
3432 SSE_FOP(cmpnlt),
3433 SSE_FOP(cmpnle),
3434 SSE_FOP(cmpord),
3435};
3436
3437static void *sse_op_table5[256] = {
3438 [0x0c] = helper_pi2fw,
3439 [0x0d] = helper_pi2fd,
3440 [0x1c] = helper_pf2iw,
3441 [0x1d] = helper_pf2id,
3442 [0x8a] = helper_pfnacc,
3443 [0x8e] = helper_pfpnacc,
3444 [0x90] = helper_pfcmpge,
3445 [0x94] = helper_pfmin,
3446 [0x96] = helper_pfrcp,
3447 [0x97] = helper_pfrsqrt,
3448 [0x9a] = helper_pfsub,
3449 [0x9e] = helper_pfadd,
3450 [0xa0] = helper_pfcmpgt,
3451 [0xa4] = helper_pfmax,
3452 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3453 [0xa7] = helper_movq, /* pfrsqit1 */
3454 [0xaa] = helper_pfsubr,
3455 [0xae] = helper_pfacc,
3456 [0xb0] = helper_pfcmpeq,
3457 [0xb4] = helper_pfmul,
3458 [0xb6] = helper_movq, /* pfrcpit2 */
3459 [0xb7] = helper_pmulhrw_mmx,
3460 [0xbb] = helper_pswapd,
3461 [0xbf] = helper_pavgb_mmx /* pavgusb */
3462};
3463
3464struct sse_op_helper_s {
3465 void *op[2]; uint32_t ext_mask;
3466};
3467#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3468#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3469#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3470#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3471static struct sse_op_helper_s sse_op_table6[256] = {
3472 [0x00] = SSSE3_OP(pshufb),
3473 [0x01] = SSSE3_OP(phaddw),
3474 [0x02] = SSSE3_OP(phaddd),
3475 [0x03] = SSSE3_OP(phaddsw),
3476 [0x04] = SSSE3_OP(pmaddubsw),
3477 [0x05] = SSSE3_OP(phsubw),
3478 [0x06] = SSSE3_OP(phsubd),
3479 [0x07] = SSSE3_OP(phsubsw),
3480 [0x08] = SSSE3_OP(psignb),
3481 [0x09] = SSSE3_OP(psignw),
3482 [0x0a] = SSSE3_OP(psignd),
3483 [0x0b] = SSSE3_OP(pmulhrsw),
3484 [0x10] = SSE41_OP(pblendvb),
3485 [0x14] = SSE41_OP(blendvps),
3486 [0x15] = SSE41_OP(blendvpd),
3487 [0x17] = SSE41_OP(ptest),
3488 [0x1c] = SSSE3_OP(pabsb),
3489 [0x1d] = SSSE3_OP(pabsw),
3490 [0x1e] = SSSE3_OP(pabsd),
3491 [0x20] = SSE41_OP(pmovsxbw),
3492 [0x21] = SSE41_OP(pmovsxbd),
3493 [0x22] = SSE41_OP(pmovsxbq),
3494 [0x23] = SSE41_OP(pmovsxwd),
3495 [0x24] = SSE41_OP(pmovsxwq),
3496 [0x25] = SSE41_OP(pmovsxdq),
3497 [0x28] = SSE41_OP(pmuldq),
3498 [0x29] = SSE41_OP(pcmpeqq),
3499 [0x2a] = SSE41_SPECIAL, /* movntqda */
3500 [0x2b] = SSE41_OP(packusdw),
3501 [0x30] = SSE41_OP(pmovzxbw),
3502 [0x31] = SSE41_OP(pmovzxbd),
3503 [0x32] = SSE41_OP(pmovzxbq),
3504 [0x33] = SSE41_OP(pmovzxwd),
3505 [0x34] = SSE41_OP(pmovzxwq),
3506 [0x35] = SSE41_OP(pmovzxdq),
3507 [0x37] = SSE42_OP(pcmpgtq),
3508 [0x38] = SSE41_OP(pminsb),
3509 [0x39] = SSE41_OP(pminsd),
3510 [0x3a] = SSE41_OP(pminuw),
3511 [0x3b] = SSE41_OP(pminud),
3512 [0x3c] = SSE41_OP(pmaxsb),
3513 [0x3d] = SSE41_OP(pmaxsd),
3514 [0x3e] = SSE41_OP(pmaxuw),
3515 [0x3f] = SSE41_OP(pmaxud),
3516 [0x40] = SSE41_OP(pmulld),
3517 [0x41] = SSE41_OP(phminposuw),
3518};
3519
3520static struct sse_op_helper_s sse_op_table7[256] = {
3521 [0x08] = SSE41_OP(roundps),
3522 [0x09] = SSE41_OP(roundpd),
3523 [0x0a] = SSE41_OP(roundss),
3524 [0x0b] = SSE41_OP(roundsd),
3525 [0x0c] = SSE41_OP(blendps),
3526 [0x0d] = SSE41_OP(blendpd),
3527 [0x0e] = SSE41_OP(pblendw),
3528 [0x0f] = SSSE3_OP(palignr),
3529 [0x14] = SSE41_SPECIAL, /* pextrb */
3530 [0x15] = SSE41_SPECIAL, /* pextrw */
3531 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3532 [0x17] = SSE41_SPECIAL, /* extractps */
3533 [0x20] = SSE41_SPECIAL, /* pinsrb */
3534 [0x21] = SSE41_SPECIAL, /* insertps */
3535 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3536 [0x40] = SSE41_OP(dpps),
3537 [0x41] = SSE41_OP(dppd),
3538 [0x42] = SSE41_OP(mpsadbw),
3539 [0x60] = SSE42_OP(pcmpestrm),
3540 [0x61] = SSE42_OP(pcmpestri),
3541 [0x62] = SSE42_OP(pcmpistrm),
3542 [0x63] = SSE42_OP(pcmpistri),
3543};
3544
3545static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3546{
3547 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3548 int modrm, mod, rm, reg, reg_addr, offset_addr;
3549 void *sse_op2;
3550
3551 b &= 0xff;
3552 if (s->prefix & PREFIX_DATA)
3553 b1 = 1;
3554 else if (s->prefix & PREFIX_REPZ)
3555 b1 = 2;
3556 else if (s->prefix & PREFIX_REPNZ)
3557 b1 = 3;
3558 else
3559 b1 = 0;
3560 sse_op2 = sse_op_table1[b][b1];
3561 if (!sse_op2)
3562 goto illegal_op;
3563 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3564 is_xmm = 1;
3565 } else {
3566 if (b1 == 0) {
3567 /* MMX case */
3568 is_xmm = 0;
3569 } else {
3570 is_xmm = 1;
3571 }
3572 }
3573 /* simple MMX/SSE operation */
3574 if (s->flags & HF_TS_MASK) {
3575 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3576 return;
3577 }
3578 if (s->flags & HF_EM_MASK) {
3579 illegal_op:
3580 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3581 return;
3582 }
3583 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3584 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3585 goto illegal_op;
3586 if (b == 0x0e) {
3587 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3588 goto illegal_op;
3589 /* femms */
3590 tcg_gen_helper_0_0(helper_emms);
3591 return;
3592 }
3593 if (b == 0x77) {
3594 /* emms */
3595 tcg_gen_helper_0_0(helper_emms);
3596 return;
3597 }
3598 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3599 the static cpu state) */
3600 if (!is_xmm) {
3601 tcg_gen_helper_0_0(helper_enter_mmx);
3602 }
3603
3604 modrm = ldub_code(s->pc++);
3605 reg = ((modrm >> 3) & 7);
3606 if (is_xmm)
3607 reg |= rex_r;
3608 mod = (modrm >> 6) & 3;
3609 if (sse_op2 == SSE_SPECIAL) {
3610 b |= (b1 << 8);
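 /* From here on the prefix index lives in bits 9:8 of b, so case
    labels read as: 0x0xx = no prefix, 0x1xx = 0x66, 0x2xx = 0xf3,
    0x3xx = 0xf2. */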
3611 switch(b) {
3612 case 0x0e7: /* movntq */
3613 if (mod == 3)
3614 goto illegal_op;
3615 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3616 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3617 break;
3618 case 0x1e7: /* movntdq */
3619 case 0x02b: /* movntps */
3620 case 0x12b: /* movntpd */
3621 if (mod == 3)
3622 goto illegal_op;
3623 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3624 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3625 break;
3626 case 0x3f0: /* lddqu: a load, unlike the stores above */
if (mod == 3)
goto illegal_op;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
break;
3627 case 0x6e: /* movd mm, ea */
3628#ifdef TARGET_X86_64
3629 if (s->dflag == 2) {
3630 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3631 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3632 } else
3633#endif
3634 {
3635 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3636 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3637 offsetof(CPUX86State,fpregs[reg].mmx));
3638 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3639 }
3640 break;
3641 case 0x16e: /* movd xmm, ea */
3642#ifdef TARGET_X86_64
3643 if (s->dflag == 2) {
3644 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3645 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3646 offsetof(CPUX86State,xmm_regs[reg]));
3647 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3648 } else
3649#endif
3650 {
3651 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3652 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3653 offsetof(CPUX86State,xmm_regs[reg]));
3654 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3655 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3656 }
3657 break;
3658 case 0x6f: /* movq mm, ea */
3659 if (mod != 3) {
3660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3661 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3662 } else {
3663 rm = (modrm & 7);
3664 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3665 offsetof(CPUX86State,fpregs[rm].mmx));
3666 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3667 offsetof(CPUX86State,fpregs[reg].mmx));
3668 }
3669 break;
3670 case 0x010: /* movups */
3671 case 0x110: /* movupd */
3672 case 0x028: /* movaps */
3673 case 0x128: /* movapd */
3674 case 0x16f: /* movdqa xmm, ea */
3675 case 0x26f: /* movdqu xmm, ea */
3676 if (mod != 3) {
3677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3678 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3679 } else {
3680 rm = (modrm & 7) | REX_B(s);
3681 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3682 offsetof(CPUX86State,xmm_regs[rm]));
3683 }
3684 break;
3685 case 0x210: /* movss xmm, ea */
3686 if (mod != 3) {
3687 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3688 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3689 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3690 gen_op_movl_T0_0();
3691 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3692 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3693 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3694 } else {
3695 rm = (modrm & 7) | REX_B(s);
3696 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3697 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3698 }
3699 break;
3700 case 0x310: /* movsd xmm, ea */
3701 if (mod != 3) {
3702 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3703 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3704 gen_op_movl_T0_0();
3705 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3706 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3707 } else {
3708 rm = (modrm & 7) | REX_B(s);
3709 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3710 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3711 }
3712 break;
3713 case 0x012: /* movlps */
3714 case 0x112: /* movlpd */
3715 if (mod != 3) {
3716 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3717 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3718 } else {
3719 /* movhlps */
3720 rm = (modrm & 7) | REX_B(s);
3721 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3722 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3723 }
3724 break;
3725 case 0x212: /* movsldup */
3726 if (mod != 3) {
3727 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3728 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3729 } else {
3730 rm = (modrm & 7) | REX_B(s);
3731 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3732 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3733 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3734 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3735 }
3736 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3737 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3738 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3739 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3740 break;
3741 case 0x312: /* movddup */
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3745 } else {
3746 rm = (modrm & 7) | REX_B(s);
3747 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3748 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3749 }
3750 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3751 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3752 break;
3753 case 0x016: /* movhps */
3754 case 0x116: /* movhpd */
3755 if (mod != 3) {
3756 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3757 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3758 } else {
3759 /* movlhps */
3760 rm = (modrm & 7) | REX_B(s);
3761 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3762 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3763 }
3764 break;
3765 case 0x216: /* movshdup */
3766 if (mod != 3) {
3767 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3768 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3769 } else {
3770 rm = (modrm & 7) | REX_B(s);
3771 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3772 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3773 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3774 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3775 }
3776 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3777 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3778 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3779 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3780 break;
3781 case 0x7e: /* movd ea, mm */
3782#ifdef TARGET_X86_64
3783 if (s->dflag == 2) {
3784 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3785 offsetof(CPUX86State,fpregs[reg].mmx));
3786 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3787 } else
3788#endif
3789 {
3790 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3791 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3792 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3793 }
3794 break;
3795 case 0x17e: /* movd ea, xmm */
3796#ifdef TARGET_X86_64
3797 if (s->dflag == 2) {
3798 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3799 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3800 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3801 } else
3802#endif
3803 {
3804 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3805 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3806 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3807 }
3808 break;
3809 case 0x27e: /* movq xmm, ea */
3810 if (mod != 3) {
3811 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3812 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3813 } else {
3814 rm = (modrm & 7) | REX_B(s);
3815 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3816 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3817 }
3818 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3819 break;
3820 case 0x7f: /* movq ea, mm */
3821 if (mod != 3) {
3822 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3823 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3824 } else {
3825 rm = (modrm & 7);
3826 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3827 offsetof(CPUX86State,fpregs[reg].mmx));
3828 }
3829 break;
3830 case 0x011: /* movups */
3831 case 0x111: /* movupd */
3832 case 0x029: /* movaps */
3833 case 0x129: /* movapd */
3834 case 0x17f: /* movdqa ea, xmm */
3835 case 0x27f: /* movdqu ea, xmm */
3836 if (mod != 3) {
3837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3838 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3839 } else {
3840 rm = (modrm & 7) | REX_B(s);
3841 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3842 offsetof(CPUX86State,xmm_regs[reg]));
3843 }
3844 break;
3845 case 0x211: /* movss ea, xmm */
3846 if (mod != 3) {
3847 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3848 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3849 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3850 } else {
3851 rm = (modrm & 7) | REX_B(s);
3852 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3853 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3854 }
3855 break;
3856 case 0x311: /* movsd ea, xmm */
3857 if (mod != 3) {
3858 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3859 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3860 } else {
3861 rm = (modrm & 7) | REX_B(s);
3862 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3863 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3864 }
3865 break;
3866 case 0x013: /* movlps */
3867 case 0x113: /* movlpd */
3868 if (mod != 3) {
3869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3870 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3871 } else {
3872 goto illegal_op;
3873 }
3874 break;
3875 case 0x017: /* movhps */
3876 case 0x117: /* movhpd */
3877 if (mod != 3) {
3878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3879 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3880 } else {
3881 goto illegal_op;
3882 }
3883 break;
3884 case 0x71: /* shift mm, im */
3885 case 0x72:
3886 case 0x73:
3887 case 0x171: /* shift xmm, im */
3888 case 0x172:
3889 case 0x173:
3890 val = ldub_code(s->pc++);
3891 if (is_xmm) {
3892 gen_op_movl_T0_im(val);
3893 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3894 gen_op_movl_T0_0();
3895 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3896 op1_offset = offsetof(CPUX86State,xmm_t0);
3897 } else {
3898 gen_op_movl_T0_im(val);
3899 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3900 gen_op_movl_T0_0();
3901 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3902 op1_offset = offsetof(CPUX86State,mmx_t0);
3903 }
3904 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3905 if (!sse_op2)
3906 goto illegal_op;
3907 if (is_xmm) {
3908 rm = (modrm & 7) | REX_B(s);
3909 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3910 } else {
3911 rm = (modrm & 7);
3912 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3913 }
3914 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3915 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3916 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3917 break;
3918 case 0x050: /* movmskps */
3919 rm = (modrm & 7) | REX_B(s);
3920 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3921 offsetof(CPUX86State,xmm_regs[rm]));
3922 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3923 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3924 gen_op_mov_reg_T0(OT_LONG, reg);
3925 break;
3926 case 0x150: /* movmskpd */
3927 rm = (modrm & 7) | REX_B(s);
3928 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3929 offsetof(CPUX86State,xmm_regs[rm]));
3930 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3931 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3932 gen_op_mov_reg_T0(OT_LONG, reg);
3933 break;
3934 case 0x02a: /* cvtpi2ps */
3935 case 0x12a: /* cvtpi2pd */
3936 tcg_gen_helper_0_0(helper_enter_mmx);
3937 if (mod != 3) {
3938 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3939 op2_offset = offsetof(CPUX86State,mmx_t0);
3940 gen_ldq_env_A0(s->mem_index, op2_offset);
3941 } else {
3942 rm = (modrm & 7);
3943 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3944 }
3945 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3946 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3947 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3948 switch(b >> 8) {
3949 case 0x0:
3950 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3951 break;
3952 default:
3953 case 0x1:
3954 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3955 break;
3956 }
3957 break;
3958 case 0x22a: /* cvtsi2ss */
3959 case 0x32a: /* cvtsi2sd */
3960 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3961 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3962 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3963 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3964 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3965 if (ot == OT_LONG) {
3966 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3967 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3968 } else {
3969 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3970 }
3971 break;
3972 case 0x02c: /* cvttps2pi */
3973 case 0x12c: /* cvttpd2pi */
3974 case 0x02d: /* cvtps2pi */
3975 case 0x12d: /* cvtpd2pi */
3976 tcg_gen_helper_0_0(helper_enter_mmx);
3977 if (mod != 3) {
3978 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3979 op2_offset = offsetof(CPUX86State,xmm_t0);
3980 gen_ldo_env_A0(s->mem_index, op2_offset);
3981 } else {
3982 rm = (modrm & 7) | REX_B(s);
3983 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3984 }
3985 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3986 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3987 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3988 switch(b) {
3989 case 0x02c:
3990 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3991 break;
3992 case 0x12c:
3993 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3994 break;
3995 case 0x02d:
3996 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3997 break;
3998 case 0x12d:
3999 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4000 break;
4001 }
4002 break;
4003 case 0x22c: /* cvttss2si */
4004 case 0x32c: /* cvttsd2si */
4005 case 0x22d: /* cvtss2si */
4006 case 0x32d: /* cvtsd2si */
4007 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4008 if (mod != 3) {
4009 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4010 if ((b >> 8) & 1) {
4011 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4012 } else {
4013 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4014 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4015 }
4016 op2_offset = offsetof(CPUX86State,xmm_t0);
4017 } else {
4018 rm = (modrm & 7) | REX_B(s);
4019 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4020 }
4021 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4022 (b & 1) * 4];
4023 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4024 if (ot == OT_LONG) {
4025 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4026 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4027 } else {
4028 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4029 }
4030 gen_op_mov_reg_T0(ot, reg);
4031 break;
4032 case 0xc4: /* pinsrw */
4033 case 0x1c4:
4034 s->rip_offset = 1;
4035 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4036 val = ldub_code(s->pc++);
4037 if (b1) {
4038 val &= 7;
4039 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4040 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4041 } else {
4042 val &= 3;
4043 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4044 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4045 }
4046 break;
4047 case 0xc5: /* pextrw */
4048 case 0x1c5:
4049 if (mod != 3)
4050 goto illegal_op;
4051 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4052 val = ldub_code(s->pc++);
4053 if (b1) {
4054 val &= 7;
4055 rm = (modrm & 7) | REX_B(s);
4056 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4057 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4058 } else {
4059 val &= 3;
4060 rm = (modrm & 7);
4061 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4062 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4063 }
4064 reg = ((modrm >> 3) & 7) | rex_r;
4065 gen_op_mov_reg_T0(ot, reg);
4066 break;
4067 case 0x1d6: /* movq ea, xmm */
4068 if (mod != 3) {
4069 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4070 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4071 } else {
4072 rm = (modrm & 7) | REX_B(s);
4073 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4074 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4075 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4076 }
4077 break;
4078 case 0x2d6: /* movq2dq */
4079 tcg_gen_helper_0_0(helper_enter_mmx);
4080 rm = (modrm & 7);
4081 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4082 offsetof(CPUX86State,fpregs[rm].mmx));
4083 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4084 break;
4085 case 0x3d6: /* movdq2q */
4086 tcg_gen_helper_0_0(helper_enter_mmx);
4087 rm = (modrm & 7) | REX_B(s);
4088 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4089 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4090 break;
4091 case 0xd7: /* pmovmskb */
4092 case 0x1d7:
4093 if (mod != 3)
4094 goto illegal_op;
4095 if (b1) {
4096 rm = (modrm & 7) | REX_B(s);
4097 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4098 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4099 } else {
4100 rm = (modrm & 7);
4101 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4102 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4103 }
4104 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4105 reg = ((modrm >> 3) & 7) | rex_r;
4106 gen_op_mov_reg_T0(OT_LONG, reg);
4107 break;
4108 case 0x138:
4109 if (s->prefix & PREFIX_REPNZ)
4110 goto crc32;
4111 case 0x038:
4112 b = modrm;
4113 modrm = ldub_code(s->pc++);
4114 rm = modrm & 7;
4115 reg = ((modrm >> 3) & 7) | rex_r;
4116 mod = (modrm >> 6) & 3;
4117
4118 sse_op2 = sse_op_table6[b].op[b1];
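            /* sse_op_table6 is indexed by the 0f 38 opcode byte; op[0] is the
               MMX form (no prefix) and op[1] the 66-prefixed SSE form. */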
4119 if (!sse_op2)
4120 goto illegal_op;
4121 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4122 goto illegal_op;
4123
4124 if (b1) {
4125 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4126 if (mod == 3) {
4127 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4128 } else {
4129 op2_offset = offsetof(CPUX86State,xmm_t0);
4130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4131 switch (b) {
4132 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4133 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4134 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4135 gen_ldq_env_A0(s->mem_index, op2_offset +
4136 offsetof(XMMReg, XMM_Q(0)));
4137 break;
4138 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4139 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4140 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4141 (s->mem_index >> 2) - 1);
4142 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4143 offsetof(XMMReg, XMM_L(0)));
4144 break;
4145 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4146 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4147 (s->mem_index >> 2) - 1);
4148 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4149 offsetof(XMMReg, XMM_W(0)));
4150 break;
4151 case 0x2a: /* movntdqa */
4152 gen_ldo_env_A0(s->mem_index, op1_offset);
4153 return;
4154 default:
4155 gen_ldo_env_A0(s->mem_index, op2_offset);
4156 }
4157 }
4158 } else {
4159 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4160 if (mod == 3) {
4161 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4162 } else {
4163 op2_offset = offsetof(CPUX86State,mmx_t0);
4164 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4165 gen_ldq_env_A0(s->mem_index, op2_offset);
4166 }
4167 }
4168 if (sse_op2 == SSE_SPECIAL)
4169 goto illegal_op;
4170
4171 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4172 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4173 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4174
4175 if (b == 0x17)
4176 s->cc_op = CC_OP_EFLAGS;
4177 break;
4178 case 0x338: /* crc32 */
4179 crc32:
4180 b = modrm;
4181 modrm = ldub_code(s->pc++);
4182 reg = ((modrm >> 3) & 7) | rex_r;
4183
4184 if (b != 0xf0 && b != 0xf1)
4185 goto illegal_op;
4186 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4187 goto illegal_op;
4188
4189 if (b == 0xf0)
4190 ot = OT_BYTE;
4191 else if (b == 0xf1 && s->dflag != 2)
4192 if (s->prefix & PREFIX_DATA)
4193 ot = OT_WORD;
4194 else
4195 ot = OT_LONG;
4196 else
4197 ot = OT_QUAD;
4198
4199 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4200 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4201 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4202 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4203 cpu_T[0], tcg_const_i32(8 << ot));
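            /* helper_crc32(crc, data, bits): 8 << ot is the width of the
               source operand in bits. */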
4204
4205 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4206 gen_op_mov_reg_T0(ot, reg);
4207 break;
4208 case 0x03a:
4209 case 0x13a:
4210 b = modrm;
4211 modrm = ldub_code(s->pc++);
4212 rm = modrm & 7;
4213 reg = ((modrm >> 3) & 7) | rex_r;
4214 mod = (modrm >> 6) & 3;
4215
4216 sse_op2 = sse_op_table7[b].op[b1];
4217 if (!sse_op2)
4218 goto illegal_op;
4219 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4220 goto illegal_op;
4221
4222 if (sse_op2 == SSE_SPECIAL) {
4223 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4224 rm = (modrm & 7) | REX_B(s);
4225 if (mod != 3)
4226 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4227 reg = ((modrm >> 3) & 7) | rex_r;
4228 val = ldub_code(s->pc++);
4229 switch (b) {
4230 case 0x14: /* pextrb */
4231 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4232 xmm_regs[reg].XMM_B(val & 15)));
4233 if (mod == 3)
4234 gen_op_mov_reg_T0(ot, rm);
4235 else
4236 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4237 (s->mem_index >> 2) - 1);
4238 break;
4239 case 0x15: /* pextrw */
4240 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4241 xmm_regs[reg].XMM_W(val & 7)));
4242 if (mod == 3)
4243 gen_op_mov_reg_T0(ot, rm);
4244 else
4245 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4246 (s->mem_index >> 2) - 1);
4247 break;
4248 case 0x16:
4249 if (ot == OT_LONG) { /* pextrd */
4250 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4251 offsetof(CPUX86State,
4252 xmm_regs[reg].XMM_L(val & 3)));
4253 if (mod == 3)
4254 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4255 else
4256 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4257 (s->mem_index >> 2) - 1);
4258 } else { /* pextrq */
4259 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4260 offsetof(CPUX86State,
4261 xmm_regs[reg].XMM_Q(val & 1)));
4262 if (mod == 3)
4263 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4264 else
4265 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4266 (s->mem_index >> 2) - 1);
4267 }
4268 break;
4269 case 0x17: /* extractps */
4270 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4271 xmm_regs[reg].XMM_L(val & 3)));
4272 if (mod == 3)
4273 gen_op_mov_reg_T0(ot, rm);
4274 else
4275 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4276 (s->mem_index >> 2) - 1);
4277 break;
4278 case 0x20: /* pinsrb */
4279 if (mod == 3)
4280 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4281 else
4282 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4283 (s->mem_index >> 2) - 1);
4284 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4285 xmm_regs[reg].XMM_B(val & 15)));
4286 break;
4287 case 0x21: /* insertps */
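                /* imm8 layout: bits 7:6 select the source dword, bits 5:4 the
                   destination dword, and bits 3:0 form a zero mask applied
                   below. */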
4288 if (mod == 3)
4289 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4290 offsetof(CPUX86State,xmm_regs[rm]
4291 .XMM_L((val >> 6) & 3)));
4292 else
4293 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4294 (s->mem_index >> 2) - 1);
4295 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4296 offsetof(CPUX86State,xmm_regs[reg]
4297 .XMM_L((val >> 4) & 3)));
4298 if ((val >> 0) & 1)
4299 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4300 cpu_env, offsetof(CPUX86State,
4301 xmm_regs[reg].XMM_L(0)));
4302 if ((val >> 1) & 1)
4303 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4304 cpu_env, offsetof(CPUX86State,
4305 xmm_regs[reg].XMM_L(1)));
4306 if ((val >> 2) & 1)
4307 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4308 cpu_env, offsetof(CPUX86State,
4309 xmm_regs[reg].XMM_L(2)));
4310 if ((val >> 3) & 1)
4311 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4312 cpu_env, offsetof(CPUX86State,
4313 xmm_regs[reg].XMM_L(3)));
4314 break;
4315 case 0x22:
4316 if (ot == OT_LONG) { /* pinsrd */
4317 if (mod == 3)
4318 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4319 else
4320 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4321 (s->mem_index >> 2) - 1);
4322 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4323 offsetof(CPUX86State,
4324 xmm_regs[reg].XMM_L(val & 3)));
4325 } else { /* pinsrq */
4326 if (mod == 3)
4327 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4328 else
4329 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4330 (s->mem_index >> 2) - 1);
4331 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4332 offsetof(CPUX86State,
4333 xmm_regs[reg].XMM_Q(val & 1)));
4334 }
4335 break;
4336 }
4337 return;
4338 }
4339
4340 if (b1) {
4341 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4342 if (mod == 3) {
4343 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4344 } else {
4345 op2_offset = offsetof(CPUX86State,xmm_t0);
4346 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4347 gen_ldo_env_A0(s->mem_index, op2_offset);
4348 }
4349 } else {
4350 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4351 if (mod == 3) {
4352 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4353 } else {
4354 op2_offset = offsetof(CPUX86State,mmx_t0);
4355 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4356 gen_ldq_env_A0(s->mem_index, op2_offset);
4357 }
4358 }
4359 val = ldub_code(s->pc++);
4360
4361 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4362 s->cc_op = CC_OP_EFLAGS;
4363
4364 if (s->dflag == 2)
4365 /* The helper must use entire 64-bit gp registers */
4366 val |= 1 << 8;
4367 }
4368
4369 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4370 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4371 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4372 break;
4373 default:
4374 goto illegal_op;
4375 }
4376 } else {
4377 /* generic MMX or SSE operation */
4378 switch(b) {
4379 case 0x70: /* pshufx insn */
4380 case 0xc6: /* pshufx insn */
4381 case 0xc2: /* compare insns */
4382 s->rip_offset = 1;
4383 break;
4384 default:
4385 break;
4386 }
4387 if (is_xmm) {
4388 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4389 if (mod != 3) {
4390 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4391 op2_offset = offsetof(CPUX86State,xmm_t0);
4392 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4393 b == 0xc2)) {
4394 /* specific case for SSE single instructions */
4395 if (b1 == 2) {
4396 /* 32 bit access */
4397 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4398 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4399 } else {
4400 /* 64 bit access */
4401 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4402 }
4403 } else {
4404 gen_ldo_env_A0(s->mem_index, op2_offset);
4405 }
4406 } else {
4407 rm = (modrm & 7) | REX_B(s);
4408 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4409 }
4410 } else {
4411 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4412 if (mod != 3) {
4413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4414 op2_offset = offsetof(CPUX86State,mmx_t0);
4415 gen_ldq_env_A0(s->mem_index, op2_offset);
4416 } else {
4417 rm = (modrm & 7);
4418 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4419 }
4420 }
4421 switch(b) {
4422 case 0x0f: /* 3DNow! data insns */
4423 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4424 goto illegal_op;
4425 val = ldub_code(s->pc++);
4426 sse_op2 = sse_op_table5[val];
4427 if (!sse_op2)
4428 goto illegal_op;
4429 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4430 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4431 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4432 break;
4433 case 0x70: /* pshufx insn */
4434 case 0xc6: /* pshufx insn */
4435 val = ldub_code(s->pc++);
4436 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4437 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4438 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4439 break;
4440 case 0xc2:
4441 /* compare insns */
4442 val = ldub_code(s->pc++);
4443 if (val >= 8)
4444 goto illegal_op;
4445 sse_op2 = sse_op_table4[val][b1];
4446 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4447 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4448 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4449 break;
4450 case 0xf7:
4451 /* maskmov : we must prepare A0 */
4452 if (mod != 3)
4453 goto illegal_op;
4454#ifdef TARGET_X86_64
4455 if (s->aflag == 2) {
4456 gen_op_movq_A0_reg(R_EDI);
4457 } else
4458#endif
4459 {
4460 gen_op_movl_A0_reg(R_EDI);
4461 if (s->aflag == 0)
4462 gen_op_andl_A0_ffff();
4463 }
4464 gen_add_A0_ds_seg(s);
4465
4466 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4467 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4468 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4469 break;
4470 default:
4471 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4472 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4473 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4474 break;
4475 }
4476 if (b == 0x2e || b == 0x2f) {
4477 s->cc_op = CC_OP_EFLAGS;
4478 }
4479 }
4480}
4481
4482#ifdef VBOX
4483/* Checks whether this is an invalid lock sequence. Only a few
4484 instructions can be used together with the lock prefix, and of those
4485 only the forms that write a memory operand. So, this is kind of
4486 annoying work to do...
4487 The AMD manual lists the following instructions.
4488 ADC
4489 ADD
4490 AND
4491 BTC
4492 BTR
4493 BTS
4494 CMPXCHG
4495 CMPXCHG8B
4496 CMPXCHG16B
4497 DEC
4498 INC
4499 NEG
4500 NOT
4501 OR
4502 SBB
4503 SUB
4504 XADD
4505 XCHG
4506 XOR */
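/* Illustrative example: F0 01 03 is "lock add [ebx], eax" (modrm 0x03,
   mod == 0, memory destination) and is accepted, while F0 01 C3 is
   "lock add ebx, eax" (modrm 0xC3, mod == 3, register destination) and
   must raise #UD. */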
4507static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4508{
4509 target_ulong pc = s->pc;
4510 int modrm, mod, op;
4511
4512 /* X={8,16,32,64} Y={16,32,64} */
4513 switch (b)
4514 {
4515 /* /2: ADC reg/memX, immX */
4516 /* /0: ADD reg/memX, immX */
4517 /* /4: AND reg/memX, immX */
4518 /* /1: OR reg/memX, immX */
4519 /* /3: SBB reg/memX, immX */
4520 /* /5: SUB reg/memX, immX */
4521 /* /6: XOR reg/memX, immX */
4522 case 0x80:
4523 case 0x81:
4524 case 0x83:
4525 modrm = ldub_code(pc++);
4526 op = (modrm >> 3) & 7;
4527 if (op == 7) /* /7: CMP */
4528 break;
4529 mod = (modrm >> 6) & 3;
4530 if (mod == 3) /* register destination */
4531 break;
4532 return false;
4533
4534 case 0x10: /* /r: ADC reg/mem8, reg8 */
4535 case 0x11: /* /r: ADC reg/memX, regY */
4536 case 0x00: /* /r: ADD reg/mem8, reg8 */
4537 case 0x01: /* /r: ADD reg/memX, regY */
4538 case 0x20: /* /r: AND reg/mem8, reg8 */
4539 case 0x21: /* /r: AND reg/memY, regY */
4540 case 0x08: /* /r: OR reg/mem8, reg8 */
4541 case 0x09: /* /r: OR reg/memY, regY */
4542 case 0x18: /* /r: SBB reg/mem8, reg8 */
4543 case 0x19: /* /r: SBB reg/memY, regY */
4544 case 0x28: /* /r: SUB reg/mem8, reg8 */
4545 case 0x29: /* /r: SUB reg/memY, regY */
4546 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4547 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4548 case 0x30: /* /r: XOR reg/mem8, reg8 */
4549 case 0x31: /* /r: XOR reg/memY, regY */
4550 modrm = ldub_code(pc++);
4551 mod = (modrm >> 6) & 3;
4552 if (mod == 3) /* register destination */
4553 break;
4554 return false;
4555
4556 /* /1: DEC reg/memX */
4557 /* /0: INC reg/memX */
4558 case 0xfe:
4559 case 0xff:
4560 modrm = ldub_code(pc++);
4561 mod = (modrm >> 6) & 3;
4562 if (mod == 3) /* register destination */
4563 break;
4564 return false;
4565
4566 /* /3: NEG reg/memX */
4567 /* /2: NOT reg/memX */
4568 case 0xf6:
4569 case 0xf7:
4570 modrm = ldub_code(pc++);
4571 mod = (modrm >> 6) & 3;
4572 if (mod == 3) /* register destination */
4573 break;
4574 return false;
4575
4576 case 0x0f:
4577 b = ldub_code(pc++);
4578 switch (b)
4579 {
4580 /* /7: BTC reg/memY, imm8 */
4581 /* /6: BTR reg/memY, imm8 */
4582 /* /5: BTS reg/memY, imm8 */
4583 case 0xba:
4584 modrm = ldub_code(pc++);
4585 op = (modrm >> 3) & 7;
4586 if (op < 5)
4587 break;
4588 mod = (modrm >> 6) & 3;
4589 if (mod == 3) /* register destination */
4590 break;
4591 return false;
4592
4593 case 0xbb: /* /r: BTC reg/memY, regY */
4594 case 0xb3: /* /r: BTR reg/memY, regY */
4595 case 0xab: /* /r: BTS reg/memY, regY */
4596 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4597 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4598 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4599 case 0xc1: /* /r: XADD reg/memY, regY */
4600 modrm = ldub_code(pc++);
4601 mod = (modrm >> 6) & 3;
4602 if (mod == 3) /* register destination */
4603 break;
4604 return false;
4605
4606 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4607 case 0xc7:
4608 modrm = ldub_code(pc++);
4609 op = (modrm >> 3) & 7;
4610 if (op != 1)
4611 break;
4612 return false;
4613 }
4614 break;
4615 }
4616
4617 /* illegal sequence. The s->pc is past the lock prefix and that
4618 is sufficient for the TB, I think. */
4619 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4620 return true;
4621}
4622#endif /* VBOX */
4623
4624
4625/* convert one instruction. s->is_jmp is set if the translation must
4626 be stopped. Returns the next pc value */
4627static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4628{
4629 int b, prefixes, aflag, dflag;
4630 int shift, ot;
4631 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4632 target_ulong next_eip, tval;
4633 int rex_w, rex_r;
4634
4635 if (unlikely(loglevel & CPU_LOG_TB_OP))
4636 tcg_gen_debug_insn_start(pc_start);
4637
4638 s->pc = pc_start;
4639 prefixes = 0;
4640 aflag = s->code32;
4641 dflag = s->code32;
4642 s->override = -1;
4643 rex_w = -1;
4644 rex_r = 0;
4645#ifdef TARGET_X86_64
4646 s->rex_x = 0;
4647 s->rex_b = 0;
4648 x86_64_hregs = 0;
4649#endif
4650 s->rip_offset = 0; /* for relative ip address */
4651#ifdef VBOX
4652 /* nike: seems to only slow things down */
4653# if 0
4654 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4655
4656 gen_update_eip(pc_start - s->cs_base);
4657# endif
4658#endif
4659
4660 next_byte:
4661 b = ldub_code(s->pc);
4662 s->pc++;
4663 /* check prefixes */
4664#ifdef TARGET_X86_64
4665 if (CODE64(s)) {
4666 switch (b) {
4667 case 0xf3:
4668 prefixes |= PREFIX_REPZ;
4669 goto next_byte;
4670 case 0xf2:
4671 prefixes |= PREFIX_REPNZ;
4672 goto next_byte;
4673 case 0xf0:
4674 prefixes |= PREFIX_LOCK;
4675 goto next_byte;
4676 case 0x2e:
4677 s->override = R_CS;
4678 goto next_byte;
4679 case 0x36:
4680 s->override = R_SS;
4681 goto next_byte;
4682 case 0x3e:
4683 s->override = R_DS;
4684 goto next_byte;
4685 case 0x26:
4686 s->override = R_ES;
4687 goto next_byte;
4688 case 0x64:
4689 s->override = R_FS;
4690 goto next_byte;
4691 case 0x65:
4692 s->override = R_GS;
4693 goto next_byte;
4694 case 0x66:
4695 prefixes |= PREFIX_DATA;
4696 goto next_byte;
4697 case 0x67:
4698 prefixes |= PREFIX_ADR;
4699 goto next_byte;
4700 case 0x40 ... 0x4f:
4701 /* REX prefix */
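            /* bit layout is 0100WRXB: e.g. 0x4D has W=1, R=1, X=0, B=1, so
               the shifts below produce rex_w=1, rex_r=8, rex_x=0 and
               REX_B(s)=8, ready to be or'ed into the 3-bit register
               fields. */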
4702 rex_w = (b >> 3) & 1;
4703 rex_r = (b & 0x4) << 1;
4704 s->rex_x = (b & 0x2) << 2;
4705 REX_B(s) = (b & 0x1) << 3;
4706 x86_64_hregs = 1; /* select uniform byte register addressing */
4707 goto next_byte;
4708 }
4709 if (rex_w == 1) {
4710 /* 0x66 is ignored if rex.w is set */
4711 dflag = 2;
4712 } else {
4713 if (prefixes & PREFIX_DATA)
4714 dflag ^= 1;
4715 }
4716 if (!(prefixes & PREFIX_ADR))
4717 aflag = 2;
4718 } else
4719#endif
4720 {
4721 switch (b) {
4722 case 0xf3:
4723 prefixes |= PREFIX_REPZ;
4724 goto next_byte;
4725 case 0xf2:
4726 prefixes |= PREFIX_REPNZ;
4727 goto next_byte;
4728 case 0xf0:
4729 prefixes |= PREFIX_LOCK;
4730 goto next_byte;
4731 case 0x2e:
4732 s->override = R_CS;
4733 goto next_byte;
4734 case 0x36:
4735 s->override = R_SS;
4736 goto next_byte;
4737 case 0x3e:
4738 s->override = R_DS;
4739 goto next_byte;
4740 case 0x26:
4741 s->override = R_ES;
4742 goto next_byte;
4743 case 0x64:
4744 s->override = R_FS;
4745 goto next_byte;
4746 case 0x65:
4747 s->override = R_GS;
4748 goto next_byte;
4749 case 0x66:
4750 prefixes |= PREFIX_DATA;
4751 goto next_byte;
4752 case 0x67:
4753 prefixes |= PREFIX_ADR;
4754 goto next_byte;
4755 }
4756 if (prefixes & PREFIX_DATA)
4757 dflag ^= 1;
4758 if (prefixes & PREFIX_ADR)
4759 aflag ^= 1;
4760 }
4761
4762 s->prefix = prefixes;
4763 s->aflag = aflag;
4764 s->dflag = dflag;
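    /* at this point aflag/dflag encode the effective address/operand size:
       0 = 16-bit, 1 = 32-bit, 2 = 64-bit (TARGET_X86_64 only). */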
4765
4766 /* lock generation */
4767#ifndef VBOX
4768 if (prefixes & PREFIX_LOCK)
4769 tcg_gen_helper_0_0(helper_lock);
4770#else /* VBOX */
4771 if (prefixes & PREFIX_LOCK) {
4772 if (is_invalid_lock_sequence(s, pc_start, b)) {
4773 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4774 return s->pc;
4775 }
4776 tcg_gen_helper_0_0(helper_lock);
4777 }
4778#endif /* VBOX */
4779
4780 /* now check op code */
4781 reswitch:
4782 switch(b) {
4783 case 0x0f:
4784 /**************************/
4785 /* extended op code */
4786 b = ldub_code(s->pc++) | 0x100;
4787 goto reswitch;
4788
4789 /**************************/
4790 /* arith & logic */
4791 case 0x00 ... 0x05:
4792 case 0x08 ... 0x0d:
4793 case 0x10 ... 0x15:
4794 case 0x18 ... 0x1d:
4795 case 0x20 ... 0x25:
4796 case 0x28 ... 0x2d:
4797 case 0x30 ... 0x35:
4798 case 0x38 ... 0x3d:
4799 {
4800 int op, f, val;
4801 op = (b >> 3) & 7;
4802 f = (b >> 1) & 3;
4803
4804 if ((b & 1) == 0)
4805 ot = OT_BYTE;
4806 else
4807 ot = dflag + OT_WORD;
4808
4809 switch(f) {
4810 case 0: /* OP Ev, Gv */
4811 modrm = ldub_code(s->pc++);
4812 reg = ((modrm >> 3) & 7) | rex_r;
4813 mod = (modrm >> 6) & 3;
4814 rm = (modrm & 7) | REX_B(s);
4815 if (mod != 3) {
4816 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4817 opreg = OR_TMP0;
4818 } else if (op == OP_XORL && rm == reg) {
4819 xor_zero:
4820 /* xor reg, reg optimisation */
4821 gen_op_movl_T0_0();
4822 s->cc_op = CC_OP_LOGICB + ot;
4823 gen_op_mov_reg_T0(ot, reg);
4824 gen_op_update1_cc();
4825 break;
4826 } else {
4827 opreg = rm;
4828 }
4829 gen_op_mov_TN_reg(ot, 1, reg);
4830 gen_op(s, op, ot, opreg);
4831 break;
4832 case 1: /* OP Gv, Ev */
4833 modrm = ldub_code(s->pc++);
4834 mod = (modrm >> 6) & 3;
4835 reg = ((modrm >> 3) & 7) | rex_r;
4836 rm = (modrm & 7) | REX_B(s);
4837 if (mod != 3) {
4838 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4839 gen_op_ld_T1_A0(ot + s->mem_index);
4840 } else if (op == OP_XORL && rm == reg) {
4841 goto xor_zero;
4842 } else {
4843 gen_op_mov_TN_reg(ot, 1, rm);
4844 }
4845 gen_op(s, op, ot, reg);
4846 break;
4847 case 2: /* OP A, Iv */
4848 val = insn_get(s, ot);
4849 gen_op_movl_T1_im(val);
4850 gen_op(s, op, ot, OR_EAX);
4851 break;
4852 }
4853 }
4854 break;
4855
4856 case 0x82:
4857 if (CODE64(s))
4858 goto illegal_op;
4859 case 0x80: /* GRP1 */
4860 case 0x81:
4861 case 0x83:
4862 {
4863 int val;
4864
4865 if ((b & 1) == 0)
4866 ot = OT_BYTE;
4867 else
4868 ot = dflag + OT_WORD;
4869
4870 modrm = ldub_code(s->pc++);
4871 mod = (modrm >> 6) & 3;
4872 rm = (modrm & 7) | REX_B(s);
4873 op = (modrm >> 3) & 7;
4874
4875 if (mod != 3) {
4876 if (b == 0x83)
4877 s->rip_offset = 1;
4878 else
4879 s->rip_offset = insn_const_size(ot);
4880 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4881 opreg = OR_TMP0;
4882 } else {
4883 opreg = rm;
4884 }
4885
4886 switch(b) {
4887 default:
4888 case 0x80:
4889 case 0x81:
4890 case 0x82:
4891 val = insn_get(s, ot);
4892 break;
4893 case 0x83:
4894 val = (int8_t)insn_get(s, OT_BYTE);
4895 break;
4896 }
4897 gen_op_movl_T1_im(val);
4898 gen_op(s, op, ot, opreg);
4899 }
4900 break;
4901
4902 /**************************/
4903 /* inc, dec, and other misc arith */
4904 case 0x40 ... 0x47: /* inc Gv */
4905 ot = dflag ? OT_LONG : OT_WORD;
4906 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4907 break;
4908 case 0x48 ... 0x4f: /* dec Gv */
4909 ot = dflag ? OT_LONG : OT_WORD;
4910 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4911 break;
4912 case 0xf6: /* GRP3 */
4913 case 0xf7:
4914 if ((b & 1) == 0)
4915 ot = OT_BYTE;
4916 else
4917 ot = dflag + OT_WORD;
4918
4919 modrm = ldub_code(s->pc++);
4920 mod = (modrm >> 6) & 3;
4921 rm = (modrm & 7) | REX_B(s);
4922 op = (modrm >> 3) & 7;
4923 if (mod != 3) {
4924 if (op == 0)
4925 s->rip_offset = insn_const_size(ot);
4926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4927 gen_op_ld_T0_A0(ot + s->mem_index);
4928 } else {
4929 gen_op_mov_TN_reg(ot, 0, rm);
4930 }
4931
4932 switch(op) {
4933 case 0: /* test */
4934 val = insn_get(s, ot);
4935 gen_op_movl_T1_im(val);
4936 gen_op_testl_T0_T1_cc();
4937 s->cc_op = CC_OP_LOGICB + ot;
4938 break;
4939 case 2: /* not */
4940 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4941 if (mod != 3) {
4942 gen_op_st_T0_A0(ot + s->mem_index);
4943 } else {
4944 gen_op_mov_reg_T0(ot, rm);
4945 }
4946 break;
4947 case 3: /* neg */
4948 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4949 if (mod != 3) {
4950 gen_op_st_T0_A0(ot + s->mem_index);
4951 } else {
4952 gen_op_mov_reg_T0(ot, rm);
4953 }
4954 gen_op_update_neg_cc();
4955 s->cc_op = CC_OP_SUBB + ot;
4956 break;
4957 case 4: /* mul */
4958 switch(ot) {
4959 case OT_BYTE:
4960 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4961 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4962 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4963 /* XXX: use 32 bit mul which could be faster */
4964 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4965 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4966 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4967 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4968 s->cc_op = CC_OP_MULB;
4969 break;
4970 case OT_WORD:
4971 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4972 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4973 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4974 /* XXX: use 32 bit mul which could be faster */
4975 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4976 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4977 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4978 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4979 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4980 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4981 s->cc_op = CC_OP_MULW;
4982 break;
4983 default:
4984 case OT_LONG:
4985#ifdef TARGET_X86_64
4986 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4987 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4988 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4989 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4990 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4991 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4992 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4993 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4994 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4995#else
4996 {
4997 TCGv t0, t1;
4998 t0 = tcg_temp_new(TCG_TYPE_I64);
4999 t1 = tcg_temp_new(TCG_TYPE_I64);
5000 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5001 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5002 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5003 tcg_gen_mul_i64(t0, t0, t1);
5004 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5005 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5006 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5007 tcg_gen_shri_i64(t0, t0, 32);
5008 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5009 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5010 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5011 }
5012#endif
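                /* either way EDX:EAX now holds the full 64-bit product and
                   cc_src the high half, which the MUL flag computation tests
                   for zero. */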
5013 s->cc_op = CC_OP_MULL;
5014 break;
5015#ifdef TARGET_X86_64
5016 case OT_QUAD:
5017 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5018 s->cc_op = CC_OP_MULQ;
5019 break;
5020#endif
5021 }
5022 break;
5023 case 5: /* imul */
5024 switch(ot) {
5025 case OT_BYTE:
5026 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5027 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5028 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5029 /* XXX: use 32 bit mul which could be faster */
5030 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5031 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5032 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5033 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5034 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
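                /* cc_src = result minus its sign-extended low half: nonzero
                   exactly when the signed product overflowed the destination
                   width, which is what the IMUL CF/OF computation wants
                   (the same trick is used below). */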
5035 s->cc_op = CC_OP_MULB;
5036 break;
5037 case OT_WORD:
5038 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5039 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5040 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5041 /* XXX: use 32 bit mul which could be faster */
5042 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5043 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5044 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5045 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5046 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5047 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5048 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5049 s->cc_op = CC_OP_MULW;
5050 break;
5051 default:
5052 case OT_LONG:
5053#ifdef TARGET_X86_64
5054 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5055 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5056 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5057 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5058 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5059 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5060 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5061 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5062 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5063 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5064#else
5065 {
5066 TCGv t0, t1;
5067 t0 = tcg_temp_new(TCG_TYPE_I64);
5068 t1 = tcg_temp_new(TCG_TYPE_I64);
5069 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5070 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5071 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5072 tcg_gen_mul_i64(t0, t0, t1);
5073 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5074 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5075 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5076 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5077 tcg_gen_shri_i64(t0, t0, 32);
5078 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5079 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5080 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5081 }
5082#endif
5083 s->cc_op = CC_OP_MULL;
5084 break;
5085#ifdef TARGET_X86_64
5086 case OT_QUAD:
5087 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5088 s->cc_op = CC_OP_MULQ;
5089 break;
5090#endif
5091 }
5092 break;
5093 case 6: /* div */
5094 switch(ot) {
5095 case OT_BYTE:
5096 gen_jmp_im(pc_start - s->cs_base);
5097 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5098 break;
5099 case OT_WORD:
5100 gen_jmp_im(pc_start - s->cs_base);
5101 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5102 break;
5103 default:
5104 case OT_LONG:
5105 gen_jmp_im(pc_start - s->cs_base);
5106 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5107 break;
5108#ifdef TARGET_X86_64
5109 case OT_QUAD:
5110 gen_jmp_im(pc_start - s->cs_base);
5111 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5112 break;
5113#endif
5114 }
5115 break;
5116 case 7: /* idiv */
5117 switch(ot) {
5118 case OT_BYTE:
5119 gen_jmp_im(pc_start - s->cs_base);
5120 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5121 break;
5122 case OT_WORD:
5123 gen_jmp_im(pc_start - s->cs_base);
5124 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5125 break;
5126 default:
5127 case OT_LONG:
5128 gen_jmp_im(pc_start - s->cs_base);
5129 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5130 break;
5131#ifdef TARGET_X86_64
5132 case OT_QUAD:
5133 gen_jmp_im(pc_start - s->cs_base);
5134 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5135 break;
5136#endif
5137 }
5138 break;
5139 default:
5140 goto illegal_op;
5141 }
5142 break;
5143
5144 case 0xfe: /* GRP4 */
5145 case 0xff: /* GRP5 */
5146 if ((b & 1) == 0)
5147 ot = OT_BYTE;
5148 else
5149 ot = dflag + OT_WORD;
5150
5151 modrm = ldub_code(s->pc++);
5152 mod = (modrm >> 6) & 3;
5153 rm = (modrm & 7) | REX_B(s);
5154 op = (modrm >> 3) & 7;
5155 if (op >= 2 && b == 0xfe) {
5156 goto illegal_op;
5157 }
5158 if (CODE64(s)) {
5159 if (op == 2 || op == 4) {
5160 /* operand size for jumps is 64 bit */
5161 ot = OT_QUAD;
5162 } else if (op == 3 || op == 5) {
5163 /* for far calls and far jumps, the operand is 16 or 32 bit, even
5164 in long mode */
5165 ot = dflag ? OT_LONG : OT_WORD;
5166 } else if (op == 6) {
5167 /* default push size is 64 bit */
5168 ot = dflag ? OT_QUAD : OT_WORD;
5169 }
5170 }
5171 if (mod != 3) {
5172 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5173 if (op >= 2 && op != 3 && op != 5)
5174 gen_op_ld_T0_A0(ot + s->mem_index);
5175 } else {
5176 gen_op_mov_TN_reg(ot, 0, rm);
5177 }
5178
5179 switch(op) {
5180 case 0: /* inc Ev */
5181 if (mod != 3)
5182 opreg = OR_TMP0;
5183 else
5184 opreg = rm;
5185 gen_inc(s, ot, opreg, 1);
5186 break;
5187 case 1: /* dec Ev */
5188 if (mod != 3)
5189 opreg = OR_TMP0;
5190 else
5191 opreg = rm;
5192 gen_inc(s, ot, opreg, -1);
5193 break;
5194 case 2: /* call Ev */
5195 /* XXX: optimize when the operand is in memory (the 'and' is unnecessary) */
5196#ifdef VBOX_WITH_CALL_RECORD
5197 if (s->record_call)
5198 gen_op_record_call();
5199#endif
5200 if (s->dflag == 0)
5201 gen_op_andl_T0_ffff();
5202 next_eip = s->pc - s->cs_base;
5203 gen_movtl_T1_im(next_eip);
5204 gen_push_T1(s);
5205 gen_op_jmp_T0();
5206 gen_eob(s);
5207 break;
5208 case 3: /* lcall Ev */
5209 gen_op_ld_T1_A0(ot + s->mem_index);
5210 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5211 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5212 do_lcall:
5213 if (s->pe && !s->vm86) {
5214 if (s->cc_op != CC_OP_DYNAMIC)
5215 gen_op_set_cc_op(s->cc_op);
5216 gen_jmp_im(pc_start - s->cs_base);
5217 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5218 tcg_gen_helper_0_4(helper_lcall_protected,
5219 cpu_tmp2_i32, cpu_T[1],
5220 tcg_const_i32(dflag),
5221 tcg_const_i32(s->pc - pc_start));
5222 } else {
5223 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5224 tcg_gen_helper_0_4(helper_lcall_real,
5225 cpu_tmp2_i32, cpu_T[1],
5226 tcg_const_i32(dflag),
5227 tcg_const_i32(s->pc - s->cs_base));
5228 }
5229 gen_eob(s);
5230 break;
5231 case 4: /* jmp Ev */
5232 if (s->dflag == 0)
5233 gen_op_andl_T0_ffff();
5234 gen_op_jmp_T0();
5235 gen_eob(s);
5236 break;
5237 case 5: /* ljmp Ev */
5238 gen_op_ld_T1_A0(ot + s->mem_index);
5239 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5240 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5241 do_ljmp:
5242 if (s->pe && !s->vm86) {
5243 if (s->cc_op != CC_OP_DYNAMIC)
5244 gen_op_set_cc_op(s->cc_op);
5245 gen_jmp_im(pc_start - s->cs_base);
5246 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5247 tcg_gen_helper_0_3(helper_ljmp_protected,
5248 cpu_tmp2_i32,
5249 cpu_T[1],
5250 tcg_const_i32(s->pc - pc_start));
5251 } else {
5252 gen_op_movl_seg_T0_vm(R_CS);
5253 gen_op_movl_T0_T1();
5254 gen_op_jmp_T0();
5255 }
5256 gen_eob(s);
5257 break;
5258 case 6: /* push Ev */
5259 gen_push_T0(s);
5260 break;
5261 default:
5262 goto illegal_op;
5263 }
5264 break;
5265
5266 case 0x84: /* test Ev, Gv */
5267 case 0x85:
5268 if ((b & 1) == 0)
5269 ot = OT_BYTE;
5270 else
5271 ot = dflag + OT_WORD;
5272
5273 modrm = ldub_code(s->pc++);
5274 mod = (modrm >> 6) & 3;
5275 rm = (modrm & 7) | REX_B(s);
5276 reg = ((modrm >> 3) & 7) | rex_r;
5277
5278 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5279 gen_op_mov_TN_reg(ot, 1, reg);
5280 gen_op_testl_T0_T1_cc();
5281 s->cc_op = CC_OP_LOGICB + ot;
5282 break;
5283
5284 case 0xa8: /* test eAX, Iv */
5285 case 0xa9:
5286 if ((b & 1) == 0)
5287 ot = OT_BYTE;
5288 else
5289 ot = dflag + OT_WORD;
5290 val = insn_get(s, ot);
5291
5292 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5293 gen_op_movl_T1_im(val);
5294 gen_op_testl_T0_T1_cc();
5295 s->cc_op = CC_OP_LOGICB + ot;
5296 break;
5297
5298 case 0x98: /* CWDE/CBW */
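        /* sign-extend AL into AX, AX into EAX, or EAX into RAX, depending on
           the operand size. */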
5299#ifdef TARGET_X86_64
5300 if (dflag == 2) {
5301 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5302 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5303 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5304 } else
5305#endif
5306 if (dflag == 1) {
5307 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5308 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5309 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5310 } else {
5311 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5312 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5313 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5314 }
5315 break;
5316 case 0x99: /* CDQ/CWD */
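        /* replicate the accumulator's sign bit into DX, EDX or RDX. */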
5317#ifdef TARGET_X86_64
5318 if (dflag == 2) {
5319 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5320 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5321 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5322 } else
5323#endif
5324 if (dflag == 1) {
5325 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5326 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5327 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5328 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5329 } else {
5330 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5331 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5332 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5333 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5334 }
5335 break;
5336 case 0x1af: /* imul Gv, Ev */
5337 case 0x69: /* imul Gv, Ev, I */
5338 case 0x6b:
5339 ot = dflag + OT_WORD;
5340 modrm = ldub_code(s->pc++);
5341 reg = ((modrm >> 3) & 7) | rex_r;
5342 if (b == 0x69)
5343 s->rip_offset = insn_const_size(ot);
5344 else if (b == 0x6b)
5345 s->rip_offset = 1;
5346 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5347 if (b == 0x69) {
5348 val = insn_get(s, ot);
5349 gen_op_movl_T1_im(val);
5350 } else if (b == 0x6b) {
5351 val = (int8_t)insn_get(s, OT_BYTE);
5352 gen_op_movl_T1_im(val);
5353 } else {
5354 gen_op_mov_TN_reg(ot, 1, reg);
5355 }
5356
5357#ifdef TARGET_X86_64
5358 if (ot == OT_QUAD) {
5359 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5360 } else
5361#endif
5362 if (ot == OT_LONG) {
5363#ifdef TARGET_X86_64
5364 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5365 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5366 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5367 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5368 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5369 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5370#else
5371 {
5372 TCGv t0, t1;
5373 t0 = tcg_temp_new(TCG_TYPE_I64);
5374 t1 = tcg_temp_new(TCG_TYPE_I64);
5375 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5376 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5377 tcg_gen_mul_i64(t0, t0, t1);
5378 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5379 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5380 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5381 tcg_gen_shri_i64(t0, t0, 32);
5382 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5383 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5384 }
5385#endif
5386 } else {
5387 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5388 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5389 /* XXX: use 32 bit mul which could be faster */
5390 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5391 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5392 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5393 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5394 }
5395 gen_op_mov_reg_T0(ot, reg);
5396 s->cc_op = CC_OP_MULB + ot;
5397 break;
5398 case 0x1c0:
5399 case 0x1c1: /* xadd Ev, Gv */
5400 if ((b & 1) == 0)
5401 ot = OT_BYTE;
5402 else
5403 ot = dflag + OT_WORD;
5404 modrm = ldub_code(s->pc++);
5405 reg = ((modrm >> 3) & 7) | rex_r;
5406 mod = (modrm >> 6) & 3;
5407 if (mod == 3) {
5408 rm = (modrm & 7) | REX_B(s);
5409 gen_op_mov_TN_reg(ot, 0, reg);
5410 gen_op_mov_TN_reg(ot, 1, rm);
5411 gen_op_addl_T0_T1();
5412 gen_op_mov_reg_T1(ot, reg);
5413 gen_op_mov_reg_T0(ot, rm);
5414 } else {
5415 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5416 gen_op_mov_TN_reg(ot, 0, reg);
5417 gen_op_ld_T1_A0(ot + s->mem_index);
5418 gen_op_addl_T0_T1();
5419 gen_op_st_T0_A0(ot + s->mem_index);
5420 gen_op_mov_reg_T1(ot, reg);
5421 }
5422 gen_op_update2_cc();
5423 s->cc_op = CC_OP_ADDB + ot;
5424 break;
5425 case 0x1b0:
5426 case 0x1b1: /* cmpxchg Ev, Gv */
5427 {
5428 int label1, label2;
5429 TCGv t0, t1, t2, a0;
5430
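            /* translation scheme: t0 = destination value, t1 = source
               register, t2 = EAX - t0. If t2 == 0 the destination receives
               t1, otherwise EAX receives t0; the memory form rewrites the
               original value so the store is unconditional, as on real
               hardware. */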
5431 if ((b & 1) == 0)
5432 ot = OT_BYTE;
5433 else
5434 ot = dflag + OT_WORD;
5435 modrm = ldub_code(s->pc++);
5436 reg = ((modrm >> 3) & 7) | rex_r;
5437 mod = (modrm >> 6) & 3;
5438 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5439 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5440 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5441 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5442 gen_op_mov_v_reg(ot, t1, reg);
5443 if (mod == 3) {
5444 rm = (modrm & 7) | REX_B(s);
5445 gen_op_mov_v_reg(ot, t0, rm);
5446 } else {
5447 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5448 tcg_gen_mov_tl(a0, cpu_A0);
5449 gen_op_ld_v(ot + s->mem_index, t0, a0);
5450 rm = 0; /* avoid warning */
5451 }
5452 label1 = gen_new_label();
5453 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5454 tcg_gen_sub_tl(t2, t2, t0);
5455 gen_extu(ot, t2);
5456 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5457 if (mod == 3) {
5458 label2 = gen_new_label();
5459 gen_op_mov_reg_v(ot, R_EAX, t0);
5460 tcg_gen_br(label2);
5461 gen_set_label(label1);
5462 gen_op_mov_reg_v(ot, rm, t1);
5463 gen_set_label(label2);
5464 } else {
5465 tcg_gen_mov_tl(t1, t0);
5466 gen_op_mov_reg_v(ot, R_EAX, t0);
5467 gen_set_label(label1);
5468 /* always store */
5469 gen_op_st_v(ot + s->mem_index, t1, a0);
5470 }
5471 tcg_gen_mov_tl(cpu_cc_src, t0);
5472 tcg_gen_mov_tl(cpu_cc_dst, t2);
5473 s->cc_op = CC_OP_SUBB + ot;
5474 tcg_temp_free(t0);
5475 tcg_temp_free(t1);
5476 tcg_temp_free(t2);
5477 tcg_temp_free(a0);
5478 }
5479 break;
5480 case 0x1c7: /* cmpxchg8b */
5481 modrm = ldub_code(s->pc++);
5482 mod = (modrm >> 6) & 3;
5483 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5484 goto illegal_op;
5485#ifdef TARGET_X86_64
5486 if (dflag == 2) {
5487 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5488 goto illegal_op;
5489 gen_jmp_im(pc_start - s->cs_base);
5490 if (s->cc_op != CC_OP_DYNAMIC)
5491 gen_op_set_cc_op(s->cc_op);
5492 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5493 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5494 } else
5495#endif
5496 {
5497 if (!(s->cpuid_features & CPUID_CX8))
5498 goto illegal_op;
5499 gen_jmp_im(pc_start - s->cs_base);
5500 if (s->cc_op != CC_OP_DYNAMIC)
5501 gen_op_set_cc_op(s->cc_op);
5502 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5503 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5504 }
5505 s->cc_op = CC_OP_EFLAGS;
5506 break;
5507
5508 /**************************/
5509 /* push/pop */
5510 case 0x50 ... 0x57: /* push */
5511 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5512 gen_push_T0(s);
5513 break;
5514 case 0x58 ... 0x5f: /* pop */
5515 if (CODE64(s)) {
5516 ot = dflag ? OT_QUAD : OT_WORD;
5517 } else {
5518 ot = dflag + OT_WORD;
5519 }
5520 gen_pop_T0(s);
5521 /* NOTE: order is important for pop %sp */
5522 gen_pop_update(s);
5523 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5524 break;
5525 case 0x60: /* pusha */
5526 if (CODE64(s))
5527 goto illegal_op;
5528 gen_pusha(s);
5529 break;
5530 case 0x61: /* popa */
5531 if (CODE64(s))
5532 goto illegal_op;
5533 gen_popa(s);
5534 break;
5535 case 0x68: /* push Iv */
5536 case 0x6a:
5537 if (CODE64(s)) {
5538 ot = dflag ? OT_QUAD : OT_WORD;
5539 } else {
5540 ot = dflag + OT_WORD;
5541 }
5542 if (b == 0x68)
5543 val = insn_get(s, ot);
5544 else
5545 val = (int8_t)insn_get(s, OT_BYTE);
5546 gen_op_movl_T0_im(val);
5547 gen_push_T0(s);
5548 break;
5549 case 0x8f: /* pop Ev */
5550 if (CODE64(s)) {
5551 ot = dflag ? OT_QUAD : OT_WORD;
5552 } else {
5553 ot = dflag + OT_WORD;
5554 }
5555 modrm = ldub_code(s->pc++);
5556 mod = (modrm >> 6) & 3;
5557 gen_pop_T0(s);
5558 if (mod == 3) {
5559 /* NOTE: order is important for pop %sp */
5560 gen_pop_update(s);
5561 rm = (modrm & 7) | REX_B(s);
5562 gen_op_mov_reg_T0(ot, rm);
5563 } else {
5564 /* NOTE: order is important too for MMU exceptions */
5565 s->popl_esp_hack = 1 << ot;
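            /* popl_esp_hack is consumed by gen_lea_modrm (earlier in this
               file) so that an ESP-based address is computed as if ESP had
               already been incremented by the pop. */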
5566 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5567 s->popl_esp_hack = 0;
5568 gen_pop_update(s);
5569 }
5570 break;
5571 case 0xc8: /* enter */
5572 {
5573 int level;
5574 val = lduw_code(s->pc);
5575 s->pc += 2;
5576 level = ldub_code(s->pc++);
5577 gen_enter(s, val, level);
5578 }
5579 break;
5580 case 0xc9: /* leave */
5581 /* XXX: exception not precise (ESP is updated before potential exception) */
5582 if (CODE64(s)) {
5583 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5584 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5585 } else if (s->ss32) {
5586 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5587 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5588 } else {
5589 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5590 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5591 }
5592 gen_pop_T0(s);
5593 if (CODE64(s)) {
5594 ot = dflag ? OT_QUAD : OT_WORD;
5595 } else {
5596 ot = dflag + OT_WORD;
5597 }
5598 gen_op_mov_reg_T0(ot, R_EBP);
5599 gen_pop_update(s);
5600 break;
5601 case 0x06: /* push es */
5602 case 0x0e: /* push cs */
5603 case 0x16: /* push ss */
5604 case 0x1e: /* push ds */
5605 if (CODE64(s))
5606 goto illegal_op;
5607 gen_op_movl_T0_seg(b >> 3);
5608 gen_push_T0(s);
5609 break;
5610 case 0x1a0: /* push fs */
5611 case 0x1a8: /* push gs */
5612 gen_op_movl_T0_seg((b >> 3) & 7);
5613 gen_push_T0(s);
5614 break;
5615 case 0x07: /* pop es */
5616 case 0x17: /* pop ss */
5617 case 0x1f: /* pop ds */
5618 if (CODE64(s))
5619 goto illegal_op;
5620 reg = b >> 3;
5621 gen_pop_T0(s);
5622 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5623 gen_pop_update(s);
5624 if (reg == R_SS) {
5625 /* if reg == SS, inhibit interrupts/trace. */
5626 /* If several instructions disable interrupts, only the
5627 _first_ does it */
5628 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5629 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5630 s->tf = 0;
5631 }
5632 if (s->is_jmp) {
5633 gen_jmp_im(s->pc - s->cs_base);
5634 gen_eob(s);
5635 }
5636 break;
5637 case 0x1a1: /* pop fs */
5638 case 0x1a9: /* pop gs */
5639 gen_pop_T0(s);
5640 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5641 gen_pop_update(s);
5642 if (s->is_jmp) {
5643 gen_jmp_im(s->pc - s->cs_base);
5644 gen_eob(s);
5645 }
5646 break;
5647
5648 /**************************/
5649 /* mov */
5650 case 0x88:
5651 case 0x89: /* mov Gv, Ev */
5652 if ((b & 1) == 0)
5653 ot = OT_BYTE;
5654 else
5655 ot = dflag + OT_WORD;
5656 modrm = ldub_code(s->pc++);
5657 reg = ((modrm >> 3) & 7) | rex_r;
5658
5659 /* generate a generic store */
5660 gen_ldst_modrm(s, modrm, ot, reg, 1);
5661 break;
5662 case 0xc6:
5663 case 0xc7: /* mov Ev, Iv */
5664 if ((b & 1) == 0)
5665 ot = OT_BYTE;
5666 else
5667 ot = dflag + OT_WORD;
5668 modrm = ldub_code(s->pc++);
5669 mod = (modrm >> 6) & 3;
5670 if (mod != 3) {
5671 s->rip_offset = insn_const_size(ot);
5672 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5673 }
5674 val = insn_get(s, ot);
5675 gen_op_movl_T0_im(val);
5676 if (mod != 3)
5677 gen_op_st_T0_A0(ot + s->mem_index);
5678 else
5679 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5680 break;
5681 case 0x8a:
5682 case 0x8b: /* mov Ev, Gv */
5683#ifdef VBOX /* dtrace hot fix */
5684 if (prefixes & PREFIX_LOCK)
5685 goto illegal_op;
5686#endif
5687 if ((b & 1) == 0)
5688 ot = OT_BYTE;
5689 else
5690 ot = OT_WORD + dflag;
5691 modrm = ldub_code(s->pc++);
5692 reg = ((modrm >> 3) & 7) | rex_r;
5693
5694 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5695 gen_op_mov_reg_T0(ot, reg);
5696 break;
5697 case 0x8e: /* mov seg, Gv */
5698 modrm = ldub_code(s->pc++);
5699 reg = (modrm >> 3) & 7;
5700 if (reg >= 6 || reg == R_CS)
5701 goto illegal_op;
5702 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5703 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5704 if (reg == R_SS) {
5705 /* if reg == SS, inhibit interrupts/trace */
5706 /* If several instructions disable interrupts, only the
5707 _first_ does it */
5708 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5709 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5710 s->tf = 0;
5711 }
5712 if (s->is_jmp) {
5713 gen_jmp_im(s->pc - s->cs_base);
5714 gen_eob(s);
5715 }
5716 break;
5717 case 0x8c: /* mov Gv, seg */
5718 modrm = ldub_code(s->pc++);
5719 reg = (modrm >> 3) & 7;
5720 mod = (modrm >> 6) & 3;
5721 if (reg >= 6)
5722 goto illegal_op;
5723 gen_op_movl_T0_seg(reg);
5724 if (mod == 3)
5725 ot = OT_WORD + dflag;
5726 else
5727 ot = OT_WORD;
5728 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5729 break;
5730
5731 case 0x1b6: /* movzbS Gv, Eb */
5732 case 0x1b7: /* movzwS Gv, Ew */
5733 case 0x1be: /* movsbS Gv, Eb */
5734 case 0x1bf: /* movswS Gv, Ew */
5735 {
5736 int d_ot;
5737 /* d_ot is the size of destination */
5738 d_ot = dflag + OT_WORD;
5739 /* ot is the size of source */
5740 ot = (b & 1) + OT_BYTE;
5741 modrm = ldub_code(s->pc++);
5742 reg = ((modrm >> 3) & 7) | rex_r;
5743 mod = (modrm >> 6) & 3;
5744 rm = (modrm & 7) | REX_B(s);
5745
5746 if (mod == 3) {
5747 gen_op_mov_TN_reg(ot, 0, rm);
5748 switch(ot | (b & 8)) {
5749 case OT_BYTE:
5750 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5751 break;
5752 case OT_BYTE | 8:
5753 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5754 break;
5755 case OT_WORD:
5756 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5757 break;
5758 default:
5759 case OT_WORD | 8:
5760 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5761 break;
5762 }
5763 gen_op_mov_reg_T0(d_ot, reg);
5764 } else {
5765 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5766 if (b & 8) {
5767 gen_op_lds_T0_A0(ot + s->mem_index);
5768 } else {
5769 gen_op_ldu_T0_A0(ot + s->mem_index);
5770 }
5771 gen_op_mov_reg_T0(d_ot, reg);
5772 }
5773 }
5774 break;
5775
5776 case 0x8d: /* lea */
5777 ot = dflag + OT_WORD;
5778 modrm = ldub_code(s->pc++);
5779 mod = (modrm >> 6) & 3;
5780 if (mod == 3)
5781 goto illegal_op;
5782 reg = ((modrm >> 3) & 7) | rex_r;
5783 /* we must ensure that no segment is added */
5784 s->override = -1;
5785 val = s->addseg;
5786 s->addseg = 0;
5787 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5788 s->addseg = val;
5789 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5790 break;
5791
5792 case 0xa0: /* mov EAX, Ov */
5793 case 0xa1:
5794 case 0xa2: /* mov Ov, EAX */
5795 case 0xa3:
5796 {
5797 target_ulong offset_addr;
5798
5799 if ((b & 1) == 0)
5800 ot = OT_BYTE;
5801 else
5802 ot = dflag + OT_WORD;
5803#ifdef TARGET_X86_64
5804 if (s->aflag == 2) {
5805 offset_addr = ldq_code(s->pc);
5806 s->pc += 8;
5807 gen_op_movq_A0_im(offset_addr);
5808 } else
5809#endif
5810 {
5811 if (s->aflag) {
5812 offset_addr = insn_get(s, OT_LONG);
5813 } else {
5814 offset_addr = insn_get(s, OT_WORD);
5815 }
5816 gen_op_movl_A0_im(offset_addr);
5817 }
5818 gen_add_A0_ds_seg(s);
5819 if ((b & 2) == 0) {
5820 gen_op_ld_T0_A0(ot + s->mem_index);
5821 gen_op_mov_reg_T0(ot, R_EAX);
5822 } else {
5823 gen_op_mov_TN_reg(ot, 0, R_EAX);
5824 gen_op_st_T0_A0(ot + s->mem_index);
5825 }
5826 }
5827 break;
5828 case 0xd7: /* xlat */
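        /* AL = [seg:(E/R)BX + unsigned AL], DS segment by default. */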
5829#ifdef TARGET_X86_64
5830 if (s->aflag == 2) {
5831 gen_op_movq_A0_reg(R_EBX);
5832 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5833 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5834 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5835 } else
5836#endif
5837 {
5838 gen_op_movl_A0_reg(R_EBX);
5839 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5840 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5841 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5842 if (s->aflag == 0)
5843 gen_op_andl_A0_ffff();
5844 else
5845 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5846 }
5847 gen_add_A0_ds_seg(s);
5848 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5849 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5850 break;
5851 case 0xb0 ... 0xb7: /* mov R, Ib */
5852 val = insn_get(s, OT_BYTE);
5853 gen_op_movl_T0_im(val);
5854 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5855 break;
5856 case 0xb8 ... 0xbf: /* mov R, Iv */
5857#ifdef TARGET_X86_64
5858 if (dflag == 2) {
5859 uint64_t tmp;
5860 /* 64 bit case */
5861 tmp = ldq_code(s->pc);
5862 s->pc += 8;
5863 reg = (b & 7) | REX_B(s);
5864 gen_movtl_T0_im(tmp);
5865 gen_op_mov_reg_T0(OT_QUAD, reg);
5866 } else
5867#endif
5868 {
5869 ot = dflag ? OT_LONG : OT_WORD;
5870 val = insn_get(s, ot);
5871 reg = (b & 7) | REX_B(s);
5872 gen_op_movl_T0_im(val);
5873 gen_op_mov_reg_T0(ot, reg);
5874 }
5875 break;
5876
5877 case 0x91 ... 0x97: /* xchg R, EAX */
5878 ot = dflag + OT_WORD;
5879 reg = (b & 7) | REX_B(s);
5880 rm = R_EAX;
5881 goto do_xchg_reg;
5882 case 0x86:
5883 case 0x87: /* xchg Ev, Gv */
5884 if ((b & 1) == 0)
5885 ot = OT_BYTE;
5886 else
5887 ot = dflag + OT_WORD;
5888 modrm = ldub_code(s->pc++);
5889 reg = ((modrm >> 3) & 7) | rex_r;
5890 mod = (modrm >> 6) & 3;
5891 if (mod == 3) {
5892 rm = (modrm & 7) | REX_B(s);
5893 do_xchg_reg:
5894 gen_op_mov_TN_reg(ot, 0, reg);
5895 gen_op_mov_TN_reg(ot, 1, rm);
5896 gen_op_mov_reg_T0(ot, rm);
5897 gen_op_mov_reg_T1(ot, reg);
5898 } else {
5899 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5900 gen_op_mov_TN_reg(ot, 0, reg);
5901 /* for xchg, lock is implicit */
5902 if (!(prefixes & PREFIX_LOCK))
5903 tcg_gen_helper_0_0(helper_lock);
5904 gen_op_ld_T1_A0(ot + s->mem_index);
5905 gen_op_st_T0_A0(ot + s->mem_index);
5906 if (!(prefixes & PREFIX_LOCK))
5907 tcg_gen_helper_0_0(helper_unlock);
5908 gen_op_mov_reg_T1(ot, reg);
5909 }
5910 break;
5911 case 0xc4: /* les Gv */
5912 if (CODE64(s))
5913 goto illegal_op;
5914 op = R_ES;
5915 goto do_lxx;
5916 case 0xc5: /* lds Gv */
5917 if (CODE64(s))
5918 goto illegal_op;
5919 op = R_DS;
5920 goto do_lxx;
5921 case 0x1b2: /* lss Gv */
5922 op = R_SS;
5923 goto do_lxx;
5924 case 0x1b4: /* lfs Gv */
5925 op = R_FS;
5926 goto do_lxx;
5927 case 0x1b5: /* lgs Gv */
5928 op = R_GS;
5929 do_lxx:
5930 ot = dflag ? OT_LONG : OT_WORD;
5931 modrm = ldub_code(s->pc++);
5932 reg = ((modrm >> 3) & 7) | rex_r;
5933 mod = (modrm >> 6) & 3;
5934 if (mod == 3)
5935 goto illegal_op;
5936 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5937 gen_op_ld_T1_A0(ot + s->mem_index);
5938 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5939 /* load the segment first to handle exceptions properly */
5940 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5941 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5942 /* then put the data */
5943 gen_op_mov_reg_T1(ot, reg);
5944 if (s->is_jmp) {
5945 gen_jmp_im(s->pc - s->cs_base);
5946 gen_eob(s);
5947 }
5948 break;
5949
5950 /************************/
5951 /* shifts */
5952 case 0xc0:
5953 case 0xc1:
5954 /* shift Ev,Ib */
5955 shift = 2;
5956 grp2:
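        /* shift encodes the count source: 0 = CL, 1 = the constant 1,
           2 = an imm8 fetched below. */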
5957 {
5958 if ((b & 1) == 0)
5959 ot = OT_BYTE;
5960 else
5961 ot = dflag + OT_WORD;
5962
5963 modrm = ldub_code(s->pc++);
5964 mod = (modrm >> 6) & 3;
5965 op = (modrm >> 3) & 7;
5966
5967 if (mod != 3) {
5968 if (shift == 2) {
5969 s->rip_offset = 1;
5970 }
5971 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5972 opreg = OR_TMP0;
5973 } else {
5974 opreg = (modrm & 7) | REX_B(s);
5975 }
5976
5977 /* shift == 0: count in CL; otherwise a constant (1, or the imm8 read below) */
5978 if (shift == 0) {
5979 gen_shift(s, op, ot, opreg, OR_ECX);
5980 } else {
5981 if (shift == 2) {
5982 shift = ldub_code(s->pc++);
5983 }
5984 gen_shifti(s, op, ot, opreg, shift);
5985 }
5986 }
5987 break;
5988 case 0xd0:
5989 case 0xd1:
5990 /* shift Ev,1 */
5991 shift = 1;
5992 goto grp2;
5993 case 0xd2:
5994 case 0xd3:
5995 /* shift Ev,cl */
5996 shift = 0;
5997 goto grp2;
5998
5999 case 0x1a4: /* shld imm */
6000 op = 0;
6001 shift = 1;
6002 goto do_shiftd;
6003 case 0x1a5: /* shld cl */
6004 op = 0;
6005 shift = 0;
6006 goto do_shiftd;
6007 case 0x1ac: /* shrd imm */
6008 op = 1;
6009 shift = 1;
6010 goto do_shiftd;
6011 case 0x1ad: /* shrd cl */
6012 op = 1;
6013 shift = 0;
6014 do_shiftd:
6015 ot = dflag + OT_WORD;
6016 modrm = ldub_code(s->pc++);
6017 mod = (modrm >> 6) & 3;
6018 rm = (modrm & 7) | REX_B(s);
6019 reg = ((modrm >> 3) & 7) | rex_r;
6020 if (mod != 3) {
6021 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6022 opreg = OR_TMP0;
6023 } else {
6024 opreg = rm;
6025 }
6026 gen_op_mov_TN_reg(ot, 1, reg);
6027
6028 if (shift) {
6029 val = ldub_code(s->pc++);
6030 tcg_gen_movi_tl(cpu_T3, val);
6031 } else {
6032 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6033 }
6034 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6035 break;
6036
6037 /************************/
6038 /* floats */
6039 case 0xd8 ... 0xdf:
6040 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6041 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6042 /* XXX: what to do if illegal op ? */
6043 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6044 break;
6045 }
6046 modrm = ldub_code(s->pc++);
6047 mod = (modrm >> 6) & 3;
6048 rm = modrm & 7;
6049 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6050 if (mod != 3) {
6051 /* memory op */
6052 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6053 switch(op) {
6054 case 0x00 ... 0x07: /* fxxxs */
6055 case 0x10 ... 0x17: /* fixxxl */
6056 case 0x20 ... 0x27: /* fxxxl */
6057 case 0x30 ... 0x37: /* fixxx */
6058 {
6059 int op1;
6060 op1 = op & 7;
6061
6062 switch(op >> 4) {
6063 case 0:
6064 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6065 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6066 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6067 break;
6068 case 1:
6069 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6070 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6071 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6072 break;
6073 case 2:
6074 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6075 (s->mem_index >> 2) - 1);
6076 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6077 break;
6078 case 3:
6079 default:
6080 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6081 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6082 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6083 break;
6084 }
6085
6086 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6087 if (op1 == 3) {
6088 /* fcomp needs pop */
6089 tcg_gen_helper_0_0(helper_fpop);
6090 }
6091 }
6092 break;
6093 case 0x08: /* flds */
6094 case 0x0a: /* fsts */
6095 case 0x0b: /* fstps */
6096 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6097 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6098 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6099 switch(op & 7) {
6100 case 0:
6101 switch(op >> 4) {
6102 case 0:
6103 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6104 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6105 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6106 break;
6107 case 1:
6108 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6109 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6110 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6111 break;
6112 case 2:
6113 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6114 (s->mem_index >> 2) - 1);
6115 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6116 break;
6117 case 3:
6118 default:
6119 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6120 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6121 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6122 break;
6123 }
6124 break;
6125 case 1:
6126                    /* fisttp group; XXX: the corresponding (SSE3) CPUID bit must be tested! */
6127 switch(op >> 4) {
6128 case 1:
6129 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6130 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6131 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6132 break;
6133 case 2:
6134 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6135 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6136 (s->mem_index >> 2) - 1);
6137 break;
6138 case 3:
6139 default:
6140 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6141 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6142 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6143 break;
6144 }
6145 tcg_gen_helper_0_0(helper_fpop);
6146 break;
6147 default:
6148 switch(op >> 4) {
6149 case 0:
6150 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6151 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6152 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6153 break;
6154 case 1:
6155 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6156 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6157 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6158 break;
6159 case 2:
6160 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6161 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6162 (s->mem_index >> 2) - 1);
6163 break;
6164 case 3:
6165 default:
6166 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6167 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6168 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6169 break;
6170 }
6171 if ((op & 7) == 3)
6172 tcg_gen_helper_0_0(helper_fpop);
6173 break;
6174 }
6175 break;
6176 case 0x0c: /* fldenv mem */
6177 if (s->cc_op != CC_OP_DYNAMIC)
6178 gen_op_set_cc_op(s->cc_op);
6179 gen_jmp_im(pc_start - s->cs_base);
6180 tcg_gen_helper_0_2(helper_fldenv,
6181 cpu_A0, tcg_const_i32(s->dflag));
6182 break;
6183 case 0x0d: /* fldcw mem */
6184 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6185 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6186 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6187 break;
6188 case 0x0e: /* fnstenv mem */
6189 if (s->cc_op != CC_OP_DYNAMIC)
6190 gen_op_set_cc_op(s->cc_op);
6191 gen_jmp_im(pc_start - s->cs_base);
6192 tcg_gen_helper_0_2(helper_fstenv,
6193 cpu_A0, tcg_const_i32(s->dflag));
6194 break;
6195 case 0x0f: /* fnstcw mem */
6196 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6197 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6198 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6199 break;
6200 case 0x1d: /* fldt mem */
6201 if (s->cc_op != CC_OP_DYNAMIC)
6202 gen_op_set_cc_op(s->cc_op);
6203 gen_jmp_im(pc_start - s->cs_base);
6204 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6205 break;
6206 case 0x1f: /* fstpt mem */
6207 if (s->cc_op != CC_OP_DYNAMIC)
6208 gen_op_set_cc_op(s->cc_op);
6209 gen_jmp_im(pc_start - s->cs_base);
6210 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6211 tcg_gen_helper_0_0(helper_fpop);
6212 break;
6213 case 0x2c: /* frstor mem */
6214 if (s->cc_op != CC_OP_DYNAMIC)
6215 gen_op_set_cc_op(s->cc_op);
6216 gen_jmp_im(pc_start - s->cs_base);
6217 tcg_gen_helper_0_2(helper_frstor,
6218 cpu_A0, tcg_const_i32(s->dflag));
6219 break;
6220 case 0x2e: /* fnsave mem */
6221 if (s->cc_op != CC_OP_DYNAMIC)
6222 gen_op_set_cc_op(s->cc_op);
6223 gen_jmp_im(pc_start - s->cs_base);
6224 tcg_gen_helper_0_2(helper_fsave,
6225 cpu_A0, tcg_const_i32(s->dflag));
6226 break;
6227 case 0x2f: /* fnstsw mem */
6228 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6229 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6230 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6231 break;
6232 case 0x3c: /* fbld */
6233 if (s->cc_op != CC_OP_DYNAMIC)
6234 gen_op_set_cc_op(s->cc_op);
6235 gen_jmp_im(pc_start - s->cs_base);
6236 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6237 break;
6238 case 0x3e: /* fbstp */
6239 if (s->cc_op != CC_OP_DYNAMIC)
6240 gen_op_set_cc_op(s->cc_op);
6241 gen_jmp_im(pc_start - s->cs_base);
6242 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6243 tcg_gen_helper_0_0(helper_fpop);
6244 break;
6245 case 0x3d: /* fildll */
6246 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6247 (s->mem_index >> 2) - 1);
6248 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6249 break;
6250 case 0x3f: /* fistpll */
6251 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6252 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6253 (s->mem_index >> 2) - 1);
6254 tcg_gen_helper_0_0(helper_fpop);
6255 break;
6256 default:
6257 goto illegal_op;
6258 }
6259 } else {
6260 /* register float ops */
6261 opreg = rm;
6262
6263 switch(op) {
6264 case 0x08: /* fld sti */
6265 tcg_gen_helper_0_0(helper_fpush);
6266 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6267 break;
6268 case 0x09: /* fxchg sti */
6269 case 0x29: /* fxchg4 sti, undocumented op */
6270 case 0x39: /* fxchg7 sti, undocumented op */
6271 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6272 break;
6273 case 0x0a: /* grp d9/2 */
6274 switch(rm) {
6275 case 0: /* fnop */
6276 /* check exceptions (FreeBSD FPU probe) */
6277 if (s->cc_op != CC_OP_DYNAMIC)
6278 gen_op_set_cc_op(s->cc_op);
6279 gen_jmp_im(pc_start - s->cs_base);
6280 tcg_gen_helper_0_0(helper_fwait);
6281 break;
6282 default:
6283 goto illegal_op;
6284 }
6285 break;
6286 case 0x0c: /* grp d9/4 */
6287 switch(rm) {
6288 case 0: /* fchs */
6289 tcg_gen_helper_0_0(helper_fchs_ST0);
6290 break;
6291 case 1: /* fabs */
6292 tcg_gen_helper_0_0(helper_fabs_ST0);
6293 break;
6294 case 4: /* ftst */
6295 tcg_gen_helper_0_0(helper_fldz_FT0);
6296 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6297 break;
6298 case 5: /* fxam */
6299 tcg_gen_helper_0_0(helper_fxam_ST0);
6300 break;
6301 default:
6302 goto illegal_op;
6303 }
6304 break;
6305 case 0x0d: /* grp d9/5 */
6306 {
6307 switch(rm) {
6308 case 0:
6309 tcg_gen_helper_0_0(helper_fpush);
6310 tcg_gen_helper_0_0(helper_fld1_ST0);
6311 break;
6312 case 1:
6313 tcg_gen_helper_0_0(helper_fpush);
6314 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6315 break;
6316 case 2:
6317 tcg_gen_helper_0_0(helper_fpush);
6318 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6319 break;
6320 case 3:
6321 tcg_gen_helper_0_0(helper_fpush);
6322 tcg_gen_helper_0_0(helper_fldpi_ST0);
6323 break;
6324 case 4:
6325 tcg_gen_helper_0_0(helper_fpush);
6326 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6327 break;
6328 case 5:
6329 tcg_gen_helper_0_0(helper_fpush);
6330 tcg_gen_helper_0_0(helper_fldln2_ST0);
6331 break;
6332 case 6:
6333 tcg_gen_helper_0_0(helper_fpush);
6334 tcg_gen_helper_0_0(helper_fldz_ST0);
6335 break;
6336 default:
6337 goto illegal_op;
6338 }
6339 }
6340 break;
6341 case 0x0e: /* grp d9/6 */
6342 switch(rm) {
6343 case 0: /* f2xm1 */
6344 tcg_gen_helper_0_0(helper_f2xm1);
6345 break;
6346 case 1: /* fyl2x */
6347 tcg_gen_helper_0_0(helper_fyl2x);
6348 break;
6349 case 2: /* fptan */
6350 tcg_gen_helper_0_0(helper_fptan);
6351 break;
6352 case 3: /* fpatan */
6353 tcg_gen_helper_0_0(helper_fpatan);
6354 break;
6355 case 4: /* fxtract */
6356 tcg_gen_helper_0_0(helper_fxtract);
6357 break;
6358 case 5: /* fprem1 */
6359 tcg_gen_helper_0_0(helper_fprem1);
6360 break;
6361 case 6: /* fdecstp */
6362 tcg_gen_helper_0_0(helper_fdecstp);
6363 break;
6364 default:
6365 case 7: /* fincstp */
6366 tcg_gen_helper_0_0(helper_fincstp);
6367 break;
6368 }
6369 break;
6370 case 0x0f: /* grp d9/7 */
6371 switch(rm) {
6372 case 0: /* fprem */
6373 tcg_gen_helper_0_0(helper_fprem);
6374 break;
6375 case 1: /* fyl2xp1 */
6376 tcg_gen_helper_0_0(helper_fyl2xp1);
6377 break;
6378 case 2: /* fsqrt */
6379 tcg_gen_helper_0_0(helper_fsqrt);
6380 break;
6381 case 3: /* fsincos */
6382 tcg_gen_helper_0_0(helper_fsincos);
6383 break;
6384 case 5: /* fscale */
6385 tcg_gen_helper_0_0(helper_fscale);
6386 break;
6387 case 4: /* frndint */
6388 tcg_gen_helper_0_0(helper_frndint);
6389 break;
6390 case 6: /* fsin */
6391 tcg_gen_helper_0_0(helper_fsin);
6392 break;
6393 default:
6394 case 7: /* fcos */
6395 tcg_gen_helper_0_0(helper_fcos);
6396 break;
6397 }
6398 break;
6399 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6400 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6401 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6402 {
6403 int op1;
6404
6405 op1 = op & 7;
6406 if (op >= 0x20) {
6407 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6408 if (op >= 0x30)
6409 tcg_gen_helper_0_0(helper_fpop);
6410 } else {
6411 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6412 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6413 }
6414 }
6415 break;
6416 case 0x02: /* fcom */
6417 case 0x22: /* fcom2, undocumented op */
6418 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6419 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6420 break;
6421 case 0x03: /* fcomp */
6422 case 0x23: /* fcomp3, undocumented op */
6423 case 0x32: /* fcomp5, undocumented op */
6424 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6425 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6426 tcg_gen_helper_0_0(helper_fpop);
6427 break;
6428 case 0x15: /* da/5 */
6429 switch(rm) {
6430 case 1: /* fucompp */
6431 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6432 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6433 tcg_gen_helper_0_0(helper_fpop);
6434 tcg_gen_helper_0_0(helper_fpop);
6435 break;
6436 default:
6437 goto illegal_op;
6438 }
6439 break;
6440 case 0x1c:
6441 switch(rm) {
6442 case 0: /* feni (287 only, just do nop here) */
6443 break;
6444 case 1: /* fdisi (287 only, just do nop here) */
6445 break;
6446 case 2: /* fclex */
6447 tcg_gen_helper_0_0(helper_fclex);
6448 break;
6449 case 3: /* fninit */
6450 tcg_gen_helper_0_0(helper_fninit);
6451 break;
6452 case 4: /* fsetpm (287 only, just do nop here) */
6453 break;
6454 default:
6455 goto illegal_op;
6456 }
6457 break;
6458 case 0x1d: /* fucomi */
6459 if (s->cc_op != CC_OP_DYNAMIC)
6460 gen_op_set_cc_op(s->cc_op);
6461 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6462 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6463 s->cc_op = CC_OP_EFLAGS;
6464 break;
6465 case 0x1e: /* fcomi */
6466 if (s->cc_op != CC_OP_DYNAMIC)
6467 gen_op_set_cc_op(s->cc_op);
6468 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6469 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6470 s->cc_op = CC_OP_EFLAGS;
6471 break;
6472 case 0x28: /* ffree sti */
6473 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6474 break;
6475 case 0x2a: /* fst sti */
6476 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6477 break;
6478 case 0x2b: /* fstp sti */
6479 case 0x0b: /* fstp1 sti, undocumented op */
6480 case 0x3a: /* fstp8 sti, undocumented op */
6481 case 0x3b: /* fstp9 sti, undocumented op */
6482 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6483 tcg_gen_helper_0_0(helper_fpop);
6484 break;
6485 case 0x2c: /* fucom st(i) */
6486 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6487 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6488 break;
6489 case 0x2d: /* fucomp st(i) */
6490 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6491 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6492 tcg_gen_helper_0_0(helper_fpop);
6493 break;
6494 case 0x33: /* de/3 */
6495 switch(rm) {
6496 case 1: /* fcompp */
6497 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6498 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6499 tcg_gen_helper_0_0(helper_fpop);
6500 tcg_gen_helper_0_0(helper_fpop);
6501 break;
6502 default:
6503 goto illegal_op;
6504 }
6505 break;
6506 case 0x38: /* ffreep sti, undocumented op */
6507 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6508 tcg_gen_helper_0_0(helper_fpop);
6509 break;
6510 case 0x3c: /* df/4 */
6511 switch(rm) {
6512 case 0:
6513 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6514 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6515 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6516 break;
6517 default:
6518 goto illegal_op;
6519 }
6520 break;
6521 case 0x3d: /* fucomip */
6522 if (s->cc_op != CC_OP_DYNAMIC)
6523 gen_op_set_cc_op(s->cc_op);
6524 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6525 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6526 tcg_gen_helper_0_0(helper_fpop);
6527 s->cc_op = CC_OP_EFLAGS;
6528 break;
6529 case 0x3e: /* fcomip */
6530 if (s->cc_op != CC_OP_DYNAMIC)
6531 gen_op_set_cc_op(s->cc_op);
6532 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6533 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6534 tcg_gen_helper_0_0(helper_fpop);
6535 s->cc_op = CC_OP_EFLAGS;
6536 break;
6537 case 0x10 ... 0x13: /* fcmovxx */
6538 case 0x18 ... 0x1b:
6539 {
6540 int op1, l1;
6541 static const uint8_t fcmov_cc[8] = {
6542 (JCC_B << 1),
6543 (JCC_Z << 1),
6544 (JCC_BE << 1),
6545 (JCC_P << 1),
6546 };
6547 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
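                    /* the low two opcode bits select the condition; da/xx
                       are fcmovcc, db/xx fcmovncc, so the xor makes the
                       branch skip the fmov when the move condition fails */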
6548 l1 = gen_new_label();
6549 gen_jcc1(s, s->cc_op, op1, l1);
6550 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6551 gen_set_label(l1);
6552 }
6553 break;
6554 default:
6555 goto illegal_op;
6556 }
6557 }
6558 break;
6559 /************************/
6560 /* string ops */
6561
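    /* note: this code treats REPZ and REPNZ alike for movs/stos/lods/
       ins/outs; only cmps and scas test ZF between iterations */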
6562 case 0xa4: /* movsS */
6563 case 0xa5:
6564 if ((b & 1) == 0)
6565 ot = OT_BYTE;
6566 else
6567 ot = dflag + OT_WORD;
6568
6569 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6570 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6571 } else {
6572 gen_movs(s, ot);
6573 }
6574 break;
6575
6576 case 0xaa: /* stosS */
6577 case 0xab:
6578 if ((b & 1) == 0)
6579 ot = OT_BYTE;
6580 else
6581 ot = dflag + OT_WORD;
6582
6583 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6584 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6585 } else {
6586 gen_stos(s, ot);
6587 }
6588 break;
6589 case 0xac: /* lodsS */
6590 case 0xad:
6591 if ((b & 1) == 0)
6592 ot = OT_BYTE;
6593 else
6594 ot = dflag + OT_WORD;
6595 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6596 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6597 } else {
6598 gen_lods(s, ot);
6599 }
6600 break;
6601 case 0xae: /* scasS */
6602 case 0xaf:
6603 if ((b & 1) == 0)
6604 ot = OT_BYTE;
6605 else
6606 ot = dflag + OT_WORD;
6607 if (prefixes & PREFIX_REPNZ) {
6608 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6609 } else if (prefixes & PREFIX_REPZ) {
6610 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6611 } else {
6612 gen_scas(s, ot);
6613 s->cc_op = CC_OP_SUBB + ot;
6614 }
6615 break;
6616
6617 case 0xa6: /* cmpsS */
6618 case 0xa7:
6619 if ((b & 1) == 0)
6620 ot = OT_BYTE;
6621 else
6622 ot = dflag + OT_WORD;
6623 if (prefixes & PREFIX_REPNZ) {
6624 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6625 } else if (prefixes & PREFIX_REPZ) {
6626 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6627 } else {
6628 gen_cmps(s, ot);
6629 s->cc_op = CC_OP_SUBB + ot;
6630 }
6631 break;
6632 case 0x6c: /* insS */
6633 case 0x6d:
6634 if ((b & 1) == 0)
6635 ot = OT_BYTE;
6636 else
6637 ot = dflag ? OT_LONG : OT_WORD;
6638 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6639 gen_op_andl_T0_ffff();
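        /* per the SVM IOIO intercept layout: the TYPE bit flags an IN,
           bit 2 flags a string insn, svm_is_rep() adds the REP bit */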
6640 gen_check_io(s, ot, pc_start - s->cs_base,
6641 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6642 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6643 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6644 } else {
6645 gen_ins(s, ot);
6646 if (use_icount) {
6647 gen_jmp(s, s->pc - s->cs_base);
6648 }
6649 }
6650 break;
6651 case 0x6e: /* outsS */
6652 case 0x6f:
6653 if ((b & 1) == 0)
6654 ot = OT_BYTE;
6655 else
6656 ot = dflag ? OT_LONG : OT_WORD;
6657 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6658 gen_op_andl_T0_ffff();
6659 gen_check_io(s, ot, pc_start - s->cs_base,
6660 svm_is_rep(prefixes) | 4);
6661 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6662 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6663 } else {
6664 gen_outs(s, ot);
6665 if (use_icount) {
6666 gen_jmp(s, s->pc - s->cs_base);
6667 }
6668 }
6669 break;
6670
6671 /************************/
6672 /* port I/O */
6673
6674 case 0xe4:
6675 case 0xe5:
6676 if ((b & 1) == 0)
6677 ot = OT_BYTE;
6678 else
6679 ot = dflag ? OT_LONG : OT_WORD;
6680 val = ldub_code(s->pc++);
6681 gen_op_movl_T0_im(val);
6682 gen_check_io(s, ot, pc_start - s->cs_base,
6683 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6684 if (use_icount)
6685 gen_io_start();
6686 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6687 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6688 gen_op_mov_reg_T1(ot, R_EAX);
6689 if (use_icount) {
6690 gen_io_end();
6691 gen_jmp(s, s->pc - s->cs_base);
6692 }
6693 break;
6694 case 0xe6:
6695 case 0xe7:
6696 if ((b & 1) == 0)
6697 ot = OT_BYTE;
6698 else
6699 ot = dflag ? OT_LONG : OT_WORD;
6700 val = ldub_code(s->pc++);
6701 gen_op_movl_T0_im(val);
6702 gen_check_io(s, ot, pc_start - s->cs_base,
6703 svm_is_rep(prefixes));
6704#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6705 if (val == 0x80)
6706 break;
6707#endif /* VBOX */
6708 gen_op_mov_TN_reg(ot, 1, R_EAX);
6709
6710 if (use_icount)
6711 gen_io_start();
6712 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6713 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6714 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6715 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6716 if (use_icount) {
6717 gen_io_end();
6718 gen_jmp(s, s->pc - s->cs_base);
6719 }
6720 break;
6721 case 0xec:
6722 case 0xed:
6723 if ((b & 1) == 0)
6724 ot = OT_BYTE;
6725 else
6726 ot = dflag ? OT_LONG : OT_WORD;
6727 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6728 gen_op_andl_T0_ffff();
6729 gen_check_io(s, ot, pc_start - s->cs_base,
6730 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6731 if (use_icount)
6732 gen_io_start();
6733 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6734 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6735 gen_op_mov_reg_T1(ot, R_EAX);
6736 if (use_icount) {
6737 gen_io_end();
6738 gen_jmp(s, s->pc - s->cs_base);
6739 }
6740 break;
6741 case 0xee:
6742 case 0xef:
6743 if ((b & 1) == 0)
6744 ot = OT_BYTE;
6745 else
6746 ot = dflag ? OT_LONG : OT_WORD;
6747 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6748 gen_op_andl_T0_ffff();
6749 gen_check_io(s, ot, pc_start - s->cs_base,
6750 svm_is_rep(prefixes));
6751 gen_op_mov_TN_reg(ot, 1, R_EAX);
6752
6753 if (use_icount)
6754 gen_io_start();
6755 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6756 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6757 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6758 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6759 if (use_icount) {
6760 gen_io_end();
6761 gen_jmp(s, s->pc - s->cs_base);
6762 }
6763 break;
6764
6765 /************************/
6766 /* control */
6767 case 0xc2: /* ret im */
6768 val = ldsw_code(s->pc);
6769 s->pc += 2;
6770 gen_pop_T0(s);
6771 if (CODE64(s) && s->dflag)
6772 s->dflag = 2;
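        /* drop the return address (2 << dflag bytes) plus imm16 extra bytes */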
6773 gen_stack_update(s, val + (2 << s->dflag));
6774 if (s->dflag == 0)
6775 gen_op_andl_T0_ffff();
6776 gen_op_jmp_T0();
6777 gen_eob(s);
6778 break;
6779 case 0xc3: /* ret */
6780 gen_pop_T0(s);
6781 gen_pop_update(s);
6782 if (s->dflag == 0)
6783 gen_op_andl_T0_ffff();
6784 gen_op_jmp_T0();
6785 gen_eob(s);
6786 break;
6787 case 0xca: /* lret im */
6788 val = ldsw_code(s->pc);
6789 s->pc += 2;
6790 do_lret:
6791 if (s->pe && !s->vm86) {
6792 if (s->cc_op != CC_OP_DYNAMIC)
6793 gen_op_set_cc_op(s->cc_op);
6794 gen_jmp_im(pc_start - s->cs_base);
6795 tcg_gen_helper_0_2(helper_lret_protected,
6796 tcg_const_i32(s->dflag),
6797 tcg_const_i32(val));
6798 } else {
6799 gen_stack_A0(s);
6800 /* pop offset */
6801 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6802 if (s->dflag == 0)
6803 gen_op_andl_T0_ffff();
6804 /* NOTE: keeping EIP updated is not a problem in case of
6805 exception */
6806 gen_op_jmp_T0();
6807 /* pop selector */
6808 gen_op_addl_A0_im(2 << s->dflag);
6809 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6810 gen_op_movl_seg_T0_vm(R_CS);
6811 /* add stack offset */
6812 gen_stack_update(s, val + (4 << s->dflag));
6813 }
6814 gen_eob(s);
6815 break;
6816 case 0xcb: /* lret */
6817 val = 0;
6818 goto do_lret;
6819 case 0xcf: /* iret */
6820 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6821 if (!s->pe) {
6822 /* real mode */
6823 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6824 s->cc_op = CC_OP_EFLAGS;
6825 } else if (s->vm86) {
6826#ifdef VBOX
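        /* VME appears to virtualize only the 16-bit IRET form here;
           a 32-bit operand size still faults */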
6827 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6828#else
6829 if (s->iopl != 3) {
6830#endif
6831 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6832 } else {
6833 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6834 s->cc_op = CC_OP_EFLAGS;
6835 }
6836 } else {
6837 if (s->cc_op != CC_OP_DYNAMIC)
6838 gen_op_set_cc_op(s->cc_op);
6839 gen_jmp_im(pc_start - s->cs_base);
6840 tcg_gen_helper_0_2(helper_iret_protected,
6841 tcg_const_i32(s->dflag),
6842 tcg_const_i32(s->pc - s->cs_base));
6843 s->cc_op = CC_OP_EFLAGS;
6844 }
6845 gen_eob(s);
6846 break;
6847 case 0xe8: /* call im */
6848 {
6849 if (dflag)
6850 tval = (int32_t)insn_get(s, OT_LONG);
6851 else
6852 tval = (int16_t)insn_get(s, OT_WORD);
6853 next_eip = s->pc - s->cs_base;
6854 tval += next_eip;
6855 if (s->dflag == 0)
6856 tval &= 0xffff;
6857 gen_movtl_T0_im(next_eip);
6858 gen_push_T0(s);
6859 gen_jmp(s, tval);
6860 }
6861 break;
6862 case 0x9a: /* lcall im */
6863 {
6864 unsigned int selector, offset;
6865
6866 if (CODE64(s))
6867 goto illegal_op;
6868 ot = dflag ? OT_LONG : OT_WORD;
6869 offset = insn_get(s, ot);
6870 selector = insn_get(s, OT_WORD);
6871
6872 gen_op_movl_T0_im(selector);
6873 gen_op_movl_T1_imu(offset);
6874 }
6875 goto do_lcall;
6876 case 0xe9: /* jmp im */
6877 if (dflag)
6878 tval = (int32_t)insn_get(s, OT_LONG);
6879 else
6880 tval = (int16_t)insn_get(s, OT_WORD);
6881 tval += s->pc - s->cs_base;
6882 if (s->dflag == 0)
6883 tval &= 0xffff;
6884 else if(!CODE64(s))
6885 tval &= 0xffffffff;
6886 gen_jmp(s, tval);
6887 break;
6888 case 0xea: /* ljmp im */
6889 {
6890 unsigned int selector, offset;
6891
6892 if (CODE64(s))
6893 goto illegal_op;
6894 ot = dflag ? OT_LONG : OT_WORD;
6895 offset = insn_get(s, ot);
6896 selector = insn_get(s, OT_WORD);
6897
6898 gen_op_movl_T0_im(selector);
6899 gen_op_movl_T1_imu(offset);
6900 }
6901 goto do_ljmp;
6902 case 0xeb: /* jmp Jb */
6903 tval = (int8_t)insn_get(s, OT_BYTE);
6904 tval += s->pc - s->cs_base;
6905 if (s->dflag == 0)
6906 tval &= 0xffff;
6907 gen_jmp(s, tval);
6908 break;
6909 case 0x70 ... 0x7f: /* jcc Jb */
6910 tval = (int8_t)insn_get(s, OT_BYTE);
6911 goto do_jcc;
6912 case 0x180 ... 0x18f: /* jcc Jv */
6913 if (dflag) {
6914 tval = (int32_t)insn_get(s, OT_LONG);
6915 } else {
6916 tval = (int16_t)insn_get(s, OT_WORD);
6917 }
6918 do_jcc:
6919 next_eip = s->pc - s->cs_base;
6920 tval += next_eip;
6921 if (s->dflag == 0)
6922 tval &= 0xffff;
6923 gen_jcc(s, b, tval, next_eip);
6924 break;
6925
6926 case 0x190 ... 0x19f: /* setcc Gv */
6927 modrm = ldub_code(s->pc++);
6928 gen_setcc(s, b);
6929 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6930 break;
6931 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6932 {
6933 int l1;
6934 TCGv t0;
6935
6936 ot = dflag + OT_WORD;
6937 modrm = ldub_code(s->pc++);
6938 reg = ((modrm >> 3) & 7) | rex_r;
6939 mod = (modrm >> 6) & 3;
6940 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6941 if (mod != 3) {
6942 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6943 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6944 } else {
6945 rm = (modrm & 7) | REX_B(s);
6946 gen_op_mov_v_reg(ot, t0, rm);
6947 }
6948#ifdef TARGET_X86_64
6949 if (ot == OT_LONG) {
6950 /* XXX: specific Intel behaviour ? */
6951 l1 = gen_new_label();
6952 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6953 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6954 gen_set_label(l1);
6955 tcg_gen_movi_tl(cpu_tmp0, 0);
6956 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6957 } else
6958#endif
6959 {
6960 l1 = gen_new_label();
6961 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6962 gen_op_mov_reg_v(ot, reg, t0);
6963 gen_set_label(l1);
6964 }
6965 tcg_temp_free(t0);
6966 }
6967 break;
6968
6969 /************************/
6970 /* flags */
6971 case 0x9c: /* pushf */
6972 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6973#ifdef VBOX
6974 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6975#else
6976 if (s->vm86 && s->iopl != 3) {
6977#endif
6978 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6979 } else {
6980 if (s->cc_op != CC_OP_DYNAMIC)
6981 gen_op_set_cc_op(s->cc_op);
6982#ifdef VBOX
6983 if (s->vm86 && s->vme && s->iopl != 3)
6984 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6985 else
6986#endif
6987 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6988 gen_push_T0(s);
6989 }
6990 break;
6991 case 0x9d: /* popf */
6992 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6993#ifdef VBOX
6994 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6995#else
6996 if (s->vm86 && s->iopl != 3) {
6997#endif
6998 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6999 } else {
7000 gen_pop_T0(s);
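            /* the writable eflags bits depend on privilege: CPL 0 may
               also change IOPL, CPL <= IOPL may change IF, and otherwise
               neither may be touched */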
7001 if (s->cpl == 0) {
7002 if (s->dflag) {
7003 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7004 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7005 } else {
7006 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7007 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7008 }
7009 } else {
7010 if (s->cpl <= s->iopl) {
7011 if (s->dflag) {
7012 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7013 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7014 } else {
7015 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7016 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7017 }
7018 } else {
7019 if (s->dflag) {
7020 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7021 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7022 } else {
7023#ifdef VBOX
7024 if (s->vm86 && s->vme)
7025 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7026 else
7027#endif
7028 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7029 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7030 }
7031 }
7032 }
7033 gen_pop_update(s);
7034 s->cc_op = CC_OP_EFLAGS;
7035 /* abort translation because TF flag may change */
7036 gen_jmp_im(s->pc - s->cs_base);
7037 gen_eob(s);
7038 }
7039 break;
7040 case 0x9e: /* sahf */
7041 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7042 goto illegal_op;
7043 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7044 if (s->cc_op != CC_OP_DYNAMIC)
7045 gen_op_set_cc_op(s->cc_op);
7046 gen_compute_eflags(cpu_cc_src);
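        /* keep only O from the current flags; S, Z, A, P and C are taken from AH */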
7047 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7048 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7049 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7050 s->cc_op = CC_OP_EFLAGS;
7051 break;
7052 case 0x9f: /* lahf */
7053 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7054 goto illegal_op;
7055 if (s->cc_op != CC_OP_DYNAMIC)
7056 gen_op_set_cc_op(s->cc_op);
7057 gen_compute_eflags(cpu_T[0]);
7058 /* Note: gen_compute_eflags() only gives the condition codes */
7059 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7060 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7061 break;
7062 case 0xf5: /* cmc */
7063 if (s->cc_op != CC_OP_DYNAMIC)
7064 gen_op_set_cc_op(s->cc_op);
7065 gen_compute_eflags(cpu_cc_src);
7066 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7067 s->cc_op = CC_OP_EFLAGS;
7068 break;
7069 case 0xf8: /* clc */
7070 if (s->cc_op != CC_OP_DYNAMIC)
7071 gen_op_set_cc_op(s->cc_op);
7072 gen_compute_eflags(cpu_cc_src);
7073 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7074 s->cc_op = CC_OP_EFLAGS;
7075 break;
7076 case 0xf9: /* stc */
7077 if (s->cc_op != CC_OP_DYNAMIC)
7078 gen_op_set_cc_op(s->cc_op);
7079 gen_compute_eflags(cpu_cc_src);
7080 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7081 s->cc_op = CC_OP_EFLAGS;
7082 break;
7083 case 0xfc: /* cld */
7084 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7085 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7086 break;
7087 case 0xfd: /* std */
7088 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7089 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7090 break;
7091
7092 /************************/
7093 /* bit operations */
7094 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7095 ot = dflag + OT_WORD;
7096 modrm = ldub_code(s->pc++);
7097 op = (modrm >> 3) & 7;
7098 mod = (modrm >> 6) & 3;
7099 rm = (modrm & 7) | REX_B(s);
7100 if (mod != 3) {
7101 s->rip_offset = 1;
7102 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7103 gen_op_ld_T0_A0(ot + s->mem_index);
7104 } else {
7105 gen_op_mov_TN_reg(ot, 0, rm);
7106 }
7107        /* load the immediate bit offset */
7108 val = ldub_code(s->pc++);
7109 gen_op_movl_T1_im(val);
7110 if (op < 4)
7111 goto illegal_op;
7112 op -= 4;
7113 goto bt_op;
7114 case 0x1a3: /* bt Gv, Ev */
7115 op = 0;
7116 goto do_btx;
7117 case 0x1ab: /* bts */
7118 op = 1;
7119 goto do_btx;
7120 case 0x1b3: /* btr */
7121 op = 2;
7122 goto do_btx;
7123 case 0x1bb: /* btc */
7124 op = 3;
7125 do_btx:
7126 ot = dflag + OT_WORD;
7127 modrm = ldub_code(s->pc++);
7128 reg = ((modrm >> 3) & 7) | rex_r;
7129 mod = (modrm >> 6) & 3;
7130 rm = (modrm & 7) | REX_B(s);
7131 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7132 if (mod != 3) {
7133 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7134            /* specific case: the register bit offset can index outside the
                   operand, so fold (T1 >> (3 + ot)) << ot into the address */
7135 gen_exts(ot, cpu_T[1]);
7136 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7137 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7138 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7139 gen_op_ld_T0_A0(ot + s->mem_index);
7140 } else {
7141 gen_op_mov_TN_reg(ot, 0, rm);
7142 }
7143 bt_op:
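        /* reduce the bit offset modulo the operand width; the CC_OP_SARB
           convention then makes CF = bit 0 of cc_src, i.e. the tested bit */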
7144 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7145 switch(op) {
7146 case 0:
7147 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7148 tcg_gen_movi_tl(cpu_cc_dst, 0);
7149 break;
7150 case 1:
7151 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7152 tcg_gen_movi_tl(cpu_tmp0, 1);
7153 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7154 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7155 break;
7156 case 2:
7157 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7158 tcg_gen_movi_tl(cpu_tmp0, 1);
7159 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7160 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7161 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7162 break;
7163 default:
7164 case 3:
7165 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7166 tcg_gen_movi_tl(cpu_tmp0, 1);
7167 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7168 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7169 break;
7170 }
7171 s->cc_op = CC_OP_SARB + ot;
7172 if (op != 0) {
7173 if (mod != 3)
7174 gen_op_st_T0_A0(ot + s->mem_index);
7175 else
7176 gen_op_mov_reg_T0(ot, rm);
7177 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7178 tcg_gen_movi_tl(cpu_cc_dst, 0);
7179 }
7180 break;
7181 case 0x1bc: /* bsf */
7182 case 0x1bd: /* bsr */
7183 {
7184 int label1;
7185 TCGv t0;
7186
7187 ot = dflag + OT_WORD;
7188 modrm = ldub_code(s->pc++);
7189 reg = ((modrm >> 3) & 7) | rex_r;
7190 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7191 gen_extu(ot, cpu_T[0]);
7192 label1 = gen_new_label();
7193 tcg_gen_movi_tl(cpu_cc_dst, 0);
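            /* cc_dst feeds ZF via CC_OP_LOGIC: it stays 0 (ZF set) when
               the source is zero and the destination is left untouched */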
7194 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7195 tcg_gen_mov_tl(t0, cpu_T[0]);
7196 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7197 if (b & 1) {
7198 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7199 } else {
7200 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7201 }
7202 gen_op_mov_reg_T0(ot, reg);
7203 tcg_gen_movi_tl(cpu_cc_dst, 1);
7204 gen_set_label(label1);
7205 tcg_gen_discard_tl(cpu_cc_src);
7206 s->cc_op = CC_OP_LOGICB + ot;
7207 tcg_temp_free(t0);
7208 }
7209 break;
7210 /************************/
7211 /* bcd */
7212 case 0x27: /* daa */
7213 if (CODE64(s))
7214 goto illegal_op;
7215 if (s->cc_op != CC_OP_DYNAMIC)
7216 gen_op_set_cc_op(s->cc_op);
7217 tcg_gen_helper_0_0(helper_daa);
7218 s->cc_op = CC_OP_EFLAGS;
7219 break;
7220 case 0x2f: /* das */
7221 if (CODE64(s))
7222 goto illegal_op;
7223 if (s->cc_op != CC_OP_DYNAMIC)
7224 gen_op_set_cc_op(s->cc_op);
7225 tcg_gen_helper_0_0(helper_das);
7226 s->cc_op = CC_OP_EFLAGS;
7227 break;
7228 case 0x37: /* aaa */
7229 if (CODE64(s))
7230 goto illegal_op;
7231 if (s->cc_op != CC_OP_DYNAMIC)
7232 gen_op_set_cc_op(s->cc_op);
7233 tcg_gen_helper_0_0(helper_aaa);
7234 s->cc_op = CC_OP_EFLAGS;
7235 break;
7236 case 0x3f: /* aas */
7237 if (CODE64(s))
7238 goto illegal_op;
7239 if (s->cc_op != CC_OP_DYNAMIC)
7240 gen_op_set_cc_op(s->cc_op);
7241 tcg_gen_helper_0_0(helper_aas);
7242 s->cc_op = CC_OP_EFLAGS;
7243 break;
7244 case 0xd4: /* aam */
7245 if (CODE64(s))
7246 goto illegal_op;
7247 val = ldub_code(s->pc++);
7248 if (val == 0) {
7249 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7250 } else {
7251 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7252 s->cc_op = CC_OP_LOGICB;
7253 }
7254 break;
7255 case 0xd5: /* aad */
7256 if (CODE64(s))
7257 goto illegal_op;
7258 val = ldub_code(s->pc++);
7259 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7260 s->cc_op = CC_OP_LOGICB;
7261 break;
7262 /************************/
7263 /* misc */
7264 case 0x90: /* nop */
7265 /* XXX: xchg + rex handling */
7266 /* XXX: correct lock test for all insn */
7267 if (prefixes & PREFIX_LOCK)
7268 goto illegal_op;
7269 if (prefixes & PREFIX_REPZ) {
7270 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7271 }
7272 break;
7273 case 0x9b: /* fwait */
7274 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7275 (HF_MP_MASK | HF_TS_MASK)) {
7276 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7277 } else {
7278 if (s->cc_op != CC_OP_DYNAMIC)
7279 gen_op_set_cc_op(s->cc_op);
7280 gen_jmp_im(pc_start - s->cs_base);
7281 tcg_gen_helper_0_0(helper_fwait);
7282 }
7283 break;
7284 case 0xcc: /* int3 */
7285#ifdef VBOX
7286 if (s->vm86 && s->iopl != 3 && !s->vme) {
7287 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7288 } else
7289#endif
7290 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7291 break;
7292 case 0xcd: /* int N */
7293 val = ldub_code(s->pc++);
7294#ifdef VBOX
7295 if (s->vm86 && s->iopl != 3 && !s->vme) {
7296#else
7297 if (s->vm86 && s->iopl != 3) {
7298#endif
7299 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7300 } else {
7301 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7302 }
7303 break;
7304 case 0xce: /* into */
7305 if (CODE64(s))
7306 goto illegal_op;
7307 if (s->cc_op != CC_OP_DYNAMIC)
7308 gen_op_set_cc_op(s->cc_op);
7309 gen_jmp_im(pc_start - s->cs_base);
7310 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7311 break;
7312 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7313 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7314#if 1
7315 gen_debug(s, pc_start - s->cs_base);
7316#else
7317 /* start debug */
7318 tb_flush(cpu_single_env);
7319 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7320#endif
7321 break;
7322 case 0xfa: /* cli */
7323 if (!s->vm86) {
7324 if (s->cpl <= s->iopl) {
7325 tcg_gen_helper_0_0(helper_cli);
7326 } else {
7327 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7328 }
7329 } else {
7330 if (s->iopl == 3) {
7331 tcg_gen_helper_0_0(helper_cli);
7332#ifdef VBOX
7333 } else if (s->iopl != 3 && s->vme) {
7334 tcg_gen_helper_0_0(helper_cli_vme);
7335#endif
7336 } else {
7337 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7338 }
7339 }
7340 break;
7341 case 0xfb: /* sti */
7342 if (!s->vm86) {
7343 if (s->cpl <= s->iopl) {
7344 gen_sti:
7345 tcg_gen_helper_0_0(helper_sti);
7346                    /* interrupts are recognized only after the insn following sti */
7347                    /* if several sti insns are executed in a row, only the
7348                       _first_ one sets the inhibit flag */
7349 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7350 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7351 /* give a chance to handle pending irqs */
7352 gen_jmp_im(s->pc - s->cs_base);
7353 gen_eob(s);
7354 } else {
7355 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7356 }
7357 } else {
7358 if (s->iopl == 3) {
7359 goto gen_sti;
7360#ifdef VBOX
7361 } else if (s->iopl != 3 && s->vme) {
7362 tcg_gen_helper_0_0(helper_sti_vme);
7363 /* give a chance to handle pending irqs */
7364 gen_jmp_im(s->pc - s->cs_base);
7365 gen_eob(s);
7366#endif
7367 } else {
7368 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7369 }
7370 }
7371 break;
7372 case 0x62: /* bound */
7373 if (CODE64(s))
7374 goto illegal_op;
7375 ot = dflag ? OT_LONG : OT_WORD;
7376 modrm = ldub_code(s->pc++);
7377 reg = (modrm >> 3) & 7;
7378 mod = (modrm >> 6) & 3;
7379 if (mod == 3)
7380 goto illegal_op;
7381 gen_op_mov_TN_reg(ot, 0, reg);
7382 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7383 gen_jmp_im(pc_start - s->cs_base);
7384 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7385 if (ot == OT_WORD)
7386 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7387 else
7388 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7389 break;
7390 case 0x1c8 ... 0x1cf: /* bswap reg */
7391 reg = (b & 7) | REX_B(s);
7392#ifdef TARGET_X86_64
7393 if (dflag == 2) {
7394 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7395 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7396 gen_op_mov_reg_T0(OT_QUAD, reg);
7397 } else
7398 {
7399 TCGv tmp0;
7400 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7401
7402 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7403 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7404 tcg_gen_bswap_i32(tmp0, tmp0);
7405 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7406 gen_op_mov_reg_T0(OT_LONG, reg);
7407 }
7408#else
7409 {
7410 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7411 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7412 gen_op_mov_reg_T0(OT_LONG, reg);
7413 }
7414#endif
7415 break;
7416 case 0xd6: /* salc */
7417 if (CODE64(s))
7418 goto illegal_op;
7419 if (s->cc_op != CC_OP_DYNAMIC)
7420 gen_op_set_cc_op(s->cc_op);
7421 gen_compute_eflags_c(cpu_T[0]);
7422 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7423 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7424 break;
7425 case 0xe0: /* loopnz */
7426 case 0xe1: /* loopz */
7427 case 0xe2: /* loop */
7428 case 0xe3: /* jecxz */
7429 {
7430 int l1, l2, l3;
7431
7432 tval = (int8_t)insn_get(s, OT_BYTE);
7433 next_eip = s->pc - s->cs_base;
7434 tval += next_eip;
7435 if (s->dflag == 0)
7436 tval &= 0xffff;
7437
7438 l1 = gen_new_label();
7439 l2 = gen_new_label();
7440 l3 = gen_new_label();
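            /* l1: branch taken, l3: branch not taken, l2: common end of block */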
7441 b &= 3;
7442 switch(b) {
7443 case 0: /* loopnz */
7444 case 1: /* loopz */
7445 if (s->cc_op != CC_OP_DYNAMIC)
7446 gen_op_set_cc_op(s->cc_op);
7447 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7448 gen_op_jz_ecx(s->aflag, l3);
7449 gen_compute_eflags(cpu_tmp0);
7450 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7451 if (b == 0) {
7452 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7453 } else {
7454 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7455 }
7456 break;
7457 case 2: /* loop */
7458 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7459 gen_op_jnz_ecx(s->aflag, l1);
7460 break;
7461 default:
7462 case 3: /* jcxz */
7463 gen_op_jz_ecx(s->aflag, l1);
7464 break;
7465 }
7466
7467 gen_set_label(l3);
7468 gen_jmp_im(next_eip);
7469 tcg_gen_br(l2);
7470
7471 gen_set_label(l1);
7472 gen_jmp_im(tval);
7473 gen_set_label(l2);
7474 gen_eob(s);
7475 }
7476 break;
7477 case 0x130: /* wrmsr */
7478 case 0x132: /* rdmsr */
7479 if (s->cpl != 0) {
7480 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7481 } else {
7482 if (s->cc_op != CC_OP_DYNAMIC)
7483 gen_op_set_cc_op(s->cc_op);
7484 gen_jmp_im(pc_start - s->cs_base);
7485 if (b & 2) {
7486 tcg_gen_helper_0_0(helper_rdmsr);
7487 } else {
7488 tcg_gen_helper_0_0(helper_wrmsr);
7489 }
7490 }
7491 break;
7492 case 0x131: /* rdtsc */
7493 if (s->cc_op != CC_OP_DYNAMIC)
7494 gen_op_set_cc_op(s->cc_op);
7495 gen_jmp_im(pc_start - s->cs_base);
7496 if (use_icount)
7497 gen_io_start();
7498 tcg_gen_helper_0_0(helper_rdtsc);
7499 if (use_icount) {
7500 gen_io_end();
7501 gen_jmp(s, s->pc - s->cs_base);
7502 }
7503 break;
7504 case 0x133: /* rdpmc */
7505 if (s->cc_op != CC_OP_DYNAMIC)
7506 gen_op_set_cc_op(s->cc_op);
7507 gen_jmp_im(pc_start - s->cs_base);
7508 tcg_gen_helper_0_0(helper_rdpmc);
7509 break;
7510 case 0x134: /* sysenter */
7511#ifndef VBOX
7512        /* For Intel CPUs, SYSENTER is valid in 64-bit mode */
7513 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7514#else
7515 /** @todo: make things right */
7516 if (CODE64(s))
7517#endif
7518 goto illegal_op;
7519 if (!s->pe) {
7520 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7521 } else {
7522 if (s->cc_op != CC_OP_DYNAMIC) {
7523 gen_op_set_cc_op(s->cc_op);
7524 s->cc_op = CC_OP_DYNAMIC;
7525 }
7526 gen_jmp_im(pc_start - s->cs_base);
7527 tcg_gen_helper_0_0(helper_sysenter);
7528 gen_eob(s);
7529 }
7530 break;
7531 case 0x135: /* sysexit */
7532#ifndef VBOX
7533        /* For Intel CPUs, SYSEXIT is valid in 64-bit mode */
7534 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7535#else
7536 /** @todo: make things right */
7537 if (CODE64(s))
7538#endif
7539 goto illegal_op;
7540 if (!s->pe) {
7541 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7542 } else {
7543 if (s->cc_op != CC_OP_DYNAMIC) {
7544 gen_op_set_cc_op(s->cc_op);
7545 s->cc_op = CC_OP_DYNAMIC;
7546 }
7547 gen_jmp_im(pc_start - s->cs_base);
7548 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7549 gen_eob(s);
7550 }
7551 break;
7552#ifdef TARGET_X86_64
7553 case 0x105: /* syscall */
7554 /* XXX: is it usable in real mode ? */
7555 if (s->cc_op != CC_OP_DYNAMIC) {
7556 gen_op_set_cc_op(s->cc_op);
7557 s->cc_op = CC_OP_DYNAMIC;
7558 }
7559 gen_jmp_im(pc_start - s->cs_base);
7560 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7561 gen_eob(s);
7562 break;
7563 case 0x107: /* sysret */
7564 if (!s->pe) {
7565 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7566 } else {
7567 if (s->cc_op != CC_OP_DYNAMIC) {
7568 gen_op_set_cc_op(s->cc_op);
7569 s->cc_op = CC_OP_DYNAMIC;
7570 }
7571 gen_jmp_im(pc_start - s->cs_base);
7572 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7573 /* condition codes are modified only in long mode */
7574 if (s->lma)
7575 s->cc_op = CC_OP_EFLAGS;
7576 gen_eob(s);
7577 }
7578 break;
7579#endif
7580 case 0x1a2: /* cpuid */
7581 if (s->cc_op != CC_OP_DYNAMIC)
7582 gen_op_set_cc_op(s->cc_op);
7583 gen_jmp_im(pc_start - s->cs_base);
7584 tcg_gen_helper_0_0(helper_cpuid);
7585 break;
7586 case 0xf4: /* hlt */
7587 if (s->cpl != 0) {
7588 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7589 } else {
7590 if (s->cc_op != CC_OP_DYNAMIC)
7591 gen_op_set_cc_op(s->cc_op);
7592 gen_jmp_im(pc_start - s->cs_base);
7593 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7594 s->is_jmp = 3;
7595 }
7596 break;
7597 case 0x100:
7598 modrm = ldub_code(s->pc++);
7599 mod = (modrm >> 6) & 3;
7600 op = (modrm >> 3) & 7;
7601 switch(op) {
7602 case 0: /* sldt */
7603 if (!s->pe || s->vm86)
7604 goto illegal_op;
7605 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7606 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7607 ot = OT_WORD;
7608 if (mod == 3)
7609 ot += s->dflag;
7610 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7611 break;
7612 case 2: /* lldt */
7613 if (!s->pe || s->vm86)
7614 goto illegal_op;
7615 if (s->cpl != 0) {
7616 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7617 } else {
7618 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7619 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7620 gen_jmp_im(pc_start - s->cs_base);
7621 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7622 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7623 }
7624 break;
7625 case 1: /* str */
7626 if (!s->pe || s->vm86)
7627 goto illegal_op;
7628 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7629 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7630 ot = OT_WORD;
7631 if (mod == 3)
7632 ot += s->dflag;
7633 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7634 break;
7635 case 3: /* ltr */
7636 if (!s->pe || s->vm86)
7637 goto illegal_op;
7638 if (s->cpl != 0) {
7639 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7640 } else {
7641 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7642 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7643 gen_jmp_im(pc_start - s->cs_base);
7644 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7645 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7646 }
7647 break;
7648 case 4: /* verr */
7649 case 5: /* verw */
7650 if (!s->pe || s->vm86)
7651 goto illegal_op;
7652 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7653 if (s->cc_op != CC_OP_DYNAMIC)
7654 gen_op_set_cc_op(s->cc_op);
7655 if (op == 4)
7656 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7657 else
7658 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7659 s->cc_op = CC_OP_EFLAGS;
7660 break;
7661 default:
7662 goto illegal_op;
7663 }
7664 break;
7665 case 0x101:
7666 modrm = ldub_code(s->pc++);
7667 mod = (modrm >> 6) & 3;
7668 op = (modrm >> 3) & 7;
7669 rm = modrm & 7;
7670
7671#ifdef VBOX
7672 /* 0f 01 f9 */
7673 if (modrm == 0xf9)
7674 {
7675 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7676 goto illegal_op;
7677 gen_jmp_im(pc_start - s->cs_base);
7678 tcg_gen_helper_0_0(helper_rdtscp);
7679 break;
7680 }
7681#endif
7682 switch(op) {
7683 case 0: /* sgdt */
7684 if (mod == 3)
7685 goto illegal_op;
7686 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7687 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7688 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7689 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7690 gen_add_A0_im(s, 2);
7691 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
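            /* with a 16-bit operand size only the low 24 bits of the base are stored */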
7692 if (!s->dflag)
7693 gen_op_andl_T0_im(0xffffff);
7694 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7695 break;
7696 case 1:
7697 if (mod == 3) {
7698 switch (rm) {
7699 case 0: /* monitor */
7700 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7701 s->cpl != 0)
7702 goto illegal_op;
7703 if (s->cc_op != CC_OP_DYNAMIC)
7704 gen_op_set_cc_op(s->cc_op);
7705 gen_jmp_im(pc_start - s->cs_base);
7706#ifdef TARGET_X86_64
7707 if (s->aflag == 2) {
7708 gen_op_movq_A0_reg(R_EAX);
7709 } else
7710#endif
7711 {
7712 gen_op_movl_A0_reg(R_EAX);
7713 if (s->aflag == 0)
7714 gen_op_andl_A0_ffff();
7715 }
7716 gen_add_A0_ds_seg(s);
7717 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7718 break;
7719 case 1: /* mwait */
7720 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7721 s->cpl != 0)
7722 goto illegal_op;
7723 if (s->cc_op != CC_OP_DYNAMIC) {
7724 gen_op_set_cc_op(s->cc_op);
7725 s->cc_op = CC_OP_DYNAMIC;
7726 }
7727 gen_jmp_im(pc_start - s->cs_base);
7728 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7729 gen_eob(s);
7730 break;
7731 default:
7732 goto illegal_op;
7733 }
7734 } else { /* sidt */
7735 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7736 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7737 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7738 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7739 gen_add_A0_im(s, 2);
7740 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7741 if (!s->dflag)
7742 gen_op_andl_T0_im(0xffffff);
7743 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7744 }
7745 break;
7746 case 2: /* lgdt */
7747 case 3: /* lidt */
7748 if (mod == 3) {
7749 if (s->cc_op != CC_OP_DYNAMIC)
7750 gen_op_set_cc_op(s->cc_op);
7751 gen_jmp_im(pc_start - s->cs_base);
7752 switch(rm) {
7753 case 0: /* VMRUN */
7754 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7755 goto illegal_op;
7756 if (s->cpl != 0) {
7757 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7758 break;
7759 } else {
7760 tcg_gen_helper_0_2(helper_vmrun,
7761 tcg_const_i32(s->aflag),
7762 tcg_const_i32(s->pc - pc_start));
7763 tcg_gen_exit_tb(0);
7764 s->is_jmp = 3;
7765 }
7766 break;
7767 case 1: /* VMMCALL */
7768 if (!(s->flags & HF_SVME_MASK))
7769 goto illegal_op;
7770 tcg_gen_helper_0_0(helper_vmmcall);
7771 break;
7772 case 2: /* VMLOAD */
7773 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7774 goto illegal_op;
7775 if (s->cpl != 0) {
7776 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7777 break;
7778 } else {
7779 tcg_gen_helper_0_1(helper_vmload,
7780 tcg_const_i32(s->aflag));
7781 }
7782 break;
7783 case 3: /* VMSAVE */
7784 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7785 goto illegal_op;
7786 if (s->cpl != 0) {
7787 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7788 break;
7789 } else {
7790 tcg_gen_helper_0_1(helper_vmsave,
7791 tcg_const_i32(s->aflag));
7792 }
7793 break;
7794 case 4: /* STGI */
7795 if ((!(s->flags & HF_SVME_MASK) &&
7796 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7797 !s->pe)
7798 goto illegal_op;
7799 if (s->cpl != 0) {
7800 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7801 break;
7802 } else {
7803 tcg_gen_helper_0_0(helper_stgi);
7804 }
7805 break;
7806 case 5: /* CLGI */
7807 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7808 goto illegal_op;
7809 if (s->cpl != 0) {
7810 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7811 break;
7812 } else {
7813 tcg_gen_helper_0_0(helper_clgi);
7814 }
7815 break;
7816 case 6: /* SKINIT */
7817 if ((!(s->flags & HF_SVME_MASK) &&
7818 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7819 !s->pe)
7820 goto illegal_op;
7821 tcg_gen_helper_0_0(helper_skinit);
7822 break;
7823 case 7: /* INVLPGA */
7824 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7825 goto illegal_op;
7826 if (s->cpl != 0) {
7827 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7828 break;
7829 } else {
7830 tcg_gen_helper_0_1(helper_invlpga,
7831 tcg_const_i32(s->aflag));
7832 }
7833 break;
7834 default:
7835 goto illegal_op;
7836 }
7837 } else if (s->cpl != 0) {
7838 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7839 } else {
7840 gen_svm_check_intercept(s, pc_start,
7841 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7842 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7843 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7844 gen_add_A0_im(s, 2);
7845 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7846 if (!s->dflag)
7847 gen_op_andl_T0_im(0xffffff);
7848 if (op == 2) {
7849 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7850 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7851 } else {
7852 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7853 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7854 }
7855 }
7856 break;
7857 case 4: /* smsw */
7858 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7859 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7860 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7861 break;
7862 case 6: /* lmsw */
7863 if (s->cpl != 0) {
7864 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7865 } else {
7866 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7867 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7868 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
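                /* lmsw may change CR0 bits that are part of the translation
                   flags, so end the TB */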
7869 gen_jmp_im(s->pc - s->cs_base);
7870 gen_eob(s);
7871 }
7872 break;
7873 case 7: /* invlpg */
7874 if (s->cpl != 0) {
7875 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7876 } else {
7877 if (mod == 3) {
7878#ifdef TARGET_X86_64
7879 if (CODE64(s) && rm == 0) {
7880 /* swapgs */
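                    /* exchange GS.base with the kernel GS base MSR */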
7881 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7882 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7883 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7884 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7885 } else
7886#endif
7887 {
7888 goto illegal_op;
7889 }
7890 } else {
7891 if (s->cc_op != CC_OP_DYNAMIC)
7892 gen_op_set_cc_op(s->cc_op);
7893 gen_jmp_im(pc_start - s->cs_base);
7894 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7895 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7896 gen_jmp_im(s->pc - s->cs_base);
7897 gen_eob(s);
7898 }
7899 }
7900 break;
7901 default:
7902 goto illegal_op;
7903 }
7904 break;
7905 case 0x108: /* invd */
7906 case 0x109: /* wbinvd */
7907 if (s->cpl != 0) {
7908 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7909 } else {
7910 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7911 /* nothing to do */
7912 }
7913 break;
7914 case 0x63: /* arpl or movslS (x86_64) */
7915#ifdef TARGET_X86_64
7916 if (CODE64(s)) {
7917 int d_ot;
7918        /* d_ot is the size of the destination */
7919 d_ot = dflag + OT_WORD;
7920
7921 modrm = ldub_code(s->pc++);
7922 reg = ((modrm >> 3) & 7) | rex_r;
7923 mod = (modrm >> 6) & 3;
7924 rm = (modrm & 7) | REX_B(s);
7925
7926 if (mod == 3) {
7927 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7928 /* sign extend */
7929 if (d_ot == OT_QUAD)
7930 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7931 gen_op_mov_reg_T0(d_ot, reg);
7932 } else {
7933 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7934 if (d_ot == OT_QUAD) {
7935 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7936 } else {
7937 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7938 }
7939 gen_op_mov_reg_T0(d_ot, reg);
7940 }
7941 } else
7942#endif
7943 {
7944 int label1;
7945 TCGv t0, t1, t2, a0;
7946
7947 if (!s->pe || s->vm86)
7948 goto illegal_op;
7949
7950 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7951 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7952 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7953#ifdef VBOX
7954 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7955#endif
7956 ot = OT_WORD;
7957 modrm = ldub_code(s->pc++);
7958 reg = (modrm >> 3) & 7;
7959 mod = (modrm >> 6) & 3;
7960 rm = modrm & 7;
7961 if (mod != 3) {
7962 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7963#ifdef VBOX
7964 tcg_gen_mov_tl(a0, cpu_A0);
7965#endif
7966 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7967 } else {
7968 gen_op_mov_v_reg(ot, t0, rm);
7969 }
7970 gen_op_mov_v_reg(ot, t1, reg);
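            /* arpl: if the destination selector's RPL (t0 & 3) is below the
               source's (t1 & 3), raise it and set ZF (t2 = CC_Z), else clear ZF */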
7971 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7972 tcg_gen_andi_tl(t1, t1, 3);
7973 tcg_gen_movi_tl(t2, 0);
7974 label1 = gen_new_label();
7975 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7976 tcg_gen_andi_tl(t0, t0, ~3);
7977 tcg_gen_or_tl(t0, t0, t1);
7978 tcg_gen_movi_tl(t2, CC_Z);
7979 gen_set_label(label1);
7980 if (mod != 3) {
7981#ifdef VBOX
7982                /* cpu_A0 is not guaranteed to survive the branch; use the copy saved above */
7983 gen_op_st_v(ot + s->mem_index, t0, a0);
7984#else
7985 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7986#endif
7987 } else {
7988 gen_op_mov_reg_v(ot, rm, t0);
7989 }
7990 if (s->cc_op != CC_OP_DYNAMIC)
7991 gen_op_set_cc_op(s->cc_op);
7992 gen_compute_eflags(cpu_cc_src);
7993 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7994 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7995 s->cc_op = CC_OP_EFLAGS;
7996 tcg_temp_free(t0);
7997 tcg_temp_free(t1);
7998 tcg_temp_free(t2);
7999#ifdef VBOX
8000 tcg_temp_free(a0);
8001#endif
8002 }
8003 break;
8004 case 0x102: /* lar */
8005 case 0x103: /* lsl */
8006 {
8007 int label1;
8008 TCGv t0;
8009 if (!s->pe || s->vm86)
8010 goto illegal_op;
8011 ot = dflag ? OT_LONG : OT_WORD;
8012 modrm = ldub_code(s->pc++);
8013 reg = ((modrm >> 3) & 7) | rex_r;
8014 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8015 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8016 if (s->cc_op != CC_OP_DYNAMIC)
8017 gen_op_set_cc_op(s->cc_op);
8018 if (b == 0x102)
8019 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8020 else
8021 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
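            /* the helper signals a valid selector by setting ZF in cc_src;
               store the result only in that case */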
8022 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8023 label1 = gen_new_label();
8024 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8025 gen_op_mov_reg_v(ot, reg, t0);
8026 gen_set_label(label1);
8027 s->cc_op = CC_OP_EFLAGS;
8028 tcg_temp_free(t0);
8029 }
8030 break;
8031 case 0x118:
8032 modrm = ldub_code(s->pc++);
8033 mod = (modrm >> 6) & 3;
8034 op = (modrm >> 3) & 7;
8035 switch(op) {
8036 case 0: /* prefetchnta */
8037        case 1: /* prefetcht0 */
8038        case 2: /* prefetcht1 */
8039        case 3: /* prefetcht2 */
8040 if (mod == 3)
8041 goto illegal_op;
8042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8043 /* nothing more to do */
8044 break;
8045 default: /* nop (multi byte) */
8046 gen_nop_modrm(s, modrm);
8047 break;
8048 }
8049 break;
8050 case 0x119 ... 0x11f: /* nop (multi byte) */
8051 modrm = ldub_code(s->pc++);
8052 gen_nop_modrm(s, modrm);
8053 break;
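    /* Moves to/from control registers are privileged.  A CR write can
       change state the translator depends on (paging mode, PE, ...), so
       the block is terminated with gen_eob() after the helper call. */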
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                if (b & 2) {
                    gen_op_mov_TN_reg(ot, 0, rm);
                    tcg_gen_helper_0_2(helper_write_crN,
                                       tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    tcg_gen_helper_1_1(helper_read_crN,
                                       cpu_T[0], tcg_const_i32(reg));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
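    /* Debug register moves are privileged as well.  dr4/dr5 and dr8+ are
       rejected outright; the CR4.DE-dependent aliasing of dr4/dr5 onto
       dr6/dr7 is not modeled (see the XXX below). */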
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
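    /* 0F AE group, selected by modrm.reg: fxsave/fxrstor (memory forms
       only, require FXSR), ldmxcsr/stmxcsr (require OSFXSR), lfence and
       mfence (no code is generated for them here), sfence/clflush. */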
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7);

        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        tcg_gen_helper_1_2(helper_popcnt,
                           cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
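    /* All remaining two-byte opcodes below belong to the MMX/3DNow!/SSE
       families and share a single decoder, gen_sse(). */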
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
        /* fall through */
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */

#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
#ifdef VBOX_WITH_CALL_RECORD
    if (   !(env->state & CPU_RAW_RING0)
        && (env->cr[0] & CR0_PG_MASK)
        && !(env->eflags & X86_EFL_IF)
        && dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

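    /* allocate the TCG temporaries used by the micro-op generators */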
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
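    /* main translation loop: one guest instruction per iteration, until a
       jump or exception ends the block or one of the limits below is hit */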
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
#ifdef VBOX
#ifdef DEBUG
/*
        if (cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            // should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* in single-step mode, we generate only one instruction and
           raise an exception */
        /* if IRQs were inhibited with HF_INHIBIT_IRQ_MASK, clear the
           flag and abort the translation to give the IRQs a chance
           to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* also stop generation if the translation gets too long */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

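/* same as above, but additionally records per-op PC and cc_op information
   so that guest state can be recovered afterwards (see gen_pc_load) */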
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

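/* restore eip and cc_op from the gen_opc_* arrays once the op stream has
   been regenerated with search_pc set; pc_pos is the index of the op that
   corresponds to the searched host PC */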
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}