4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Current write positions into the micro-op stream and its parameter
   stream.  NOTE(review): presumably advanced by the generated gen_op_*
   helpers — confirm against the dyngen output. */
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;

/* Instruction prefix bits accumulated while decoding (see the prefix
   loop in disas_insn below). */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* Per-instruction translation state, threaded through every gen_*
   helper in this file. */
42 typedef struct DisasContext {
43 /* current insn context */
44 int override; /* -1 if no override */
47 uint8_t *pc; /* pc = eip + cs_base */
48 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
49 static state change (stop translation) */
50 /* current block context */
51 uint8_t *cs_base; /* base of CS segment */
52 int pe; /* protected mode */
53 int code32; /* 32 bit code segment */
54 int ss32; /* 32 bit stack segment */
55 int cc_op; /* current CC operation */
56 int addseg; /* non zero if either DS/ES/SS have a non zero base */
57 int f_st; /* currently unused */
58 int vm86; /* vm86 mode */
61 int tf; /* TF cpu flag */
62 int singlestep_enabled; /* "hardware" single step enabled */
63 int jmp_opt; /* use direct block chaining for direct jumps */
64 int mem_index; /* select memory access functions */
65 int flags; /* all execution flags */
66 struct TranslationBlock *tb; /* TB being generated (see gen_jmp/gen_eob) */
67 int popl_esp_hack; /* for correct popl with esp base handling */
/* Forward declarations: end-of-block and direct-jump emitters, defined
   later in this file. */
70 static void gen_eob(DisasContext *s);
71 static void gen_jmp(DisasContext *s, unsigned int eip);
73 /* i386 arith/logic operations */
93 OP_SHL1, /* undocumented */
/* Micro-op index names, generated from the op definition list. */
98 #define DEF(s, n, copy_size) INDEX_op_ ## s,
115 /* I386 int registers */
116 OR_EAX, /* MUST be even numbered */
124 OR_TMP0, /* temporary operand register */
126 OR_A0, /* temporary register used when doing address evaluation */
127 OR_ZERO, /* fixed zero register */
/* Register-move micro-op tables, indexed [operand size][register]
   (size 0=byte, 1=word, 2=long — matching the ld/st tables below). */
131 static GenOpFunc *gen_op_mov_reg_T0[3][8] = {
164 static GenOpFunc *gen_op_mov_reg_T1[3][8] = {
197 static GenOpFunc *gen_op_mov_reg_A0[2][8] = {
220 static GenOpFunc *gen_op_mov_TN_reg[3][2][8] =
290 static GenOpFunc *gen_op_movl_A0_reg[8] = {
/* A0 += reg << scale, scale 1..3 here (scale 0 variants are used
   directly); used for SIB index scaling in gen_lea_modrm(). */
301 static GenOpFunc *gen_op_addl_A0_reg_sN[4][8] = {
313 gen_op_addl_A0_EAX_s1,
314 gen_op_addl_A0_ECX_s1,
315 gen_op_addl_A0_EDX_s1,
316 gen_op_addl_A0_EBX_s1,
317 gen_op_addl_A0_ESP_s1,
318 gen_op_addl_A0_EBP_s1,
319 gen_op_addl_A0_ESI_s1,
320 gen_op_addl_A0_EDI_s1,
323 gen_op_addl_A0_EAX_s2,
324 gen_op_addl_A0_ECX_s2,
325 gen_op_addl_A0_EDX_s2,
326 gen_op_addl_A0_EBX_s2,
327 gen_op_addl_A0_ESP_s2,
328 gen_op_addl_A0_EBP_s2,
329 gen_op_addl_A0_ESI_s2,
330 gen_op_addl_A0_EDI_s2,
333 gen_op_addl_A0_EAX_s3,
334 gen_op_addl_A0_ECX_s3,
335 gen_op_addl_A0_EDX_s3,
336 gen_op_addl_A0_EBX_s3,
337 gen_op_addl_A0_ESP_s3,
338 gen_op_addl_A0_EBP_s3,
339 gen_op_addl_A0_ESI_s3,
340 gen_op_addl_A0_EDI_s3,
/* CMOVcc helpers: [0]=16 bit, [1]=32 bit, by destination register. */
344 static GenOpFunc *gen_op_cmov_reg_T1_T0[2][8] = {
346 gen_op_cmovw_EAX_T1_T0,
347 gen_op_cmovw_ECX_T1_T0,
348 gen_op_cmovw_EDX_T1_T0,
349 gen_op_cmovw_EBX_T1_T0,
350 gen_op_cmovw_ESP_T1_T0,
351 gen_op_cmovw_EBP_T1_T0,
352 gen_op_cmovw_ESI_T1_T0,
353 gen_op_cmovw_EDI_T1_T0,
356 gen_op_cmovl_EAX_T1_T0,
357 gen_op_cmovl_ECX_T1_T0,
358 gen_op_cmovl_EDX_T1_T0,
359 gen_op_cmovl_EBX_T1_T0,
360 gen_op_cmovl_ESP_T1_T0,
361 gen_op_cmovl_EBP_T1_T0,
362 gen_op_cmovl_ESI_T1_T0,
363 gen_op_cmovl_EDI_T1_T0,
/* Simple ALU ops on T0/T1, one entry per OP_* code. */
367 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* adc/sbb variants (they consume CC_SRC, hence the separate table);
   SUFFIX selects the memory-access variant (_raw/_kernel/_user). */
378 #define DEF_ARITHC(SUFFIX)\
380 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
381 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
384 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
385 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
388 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
389 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
/* [3] register forms; the mem form is [9] = 3 sizes x 3 access modes,
   indexed as [ot + mem_index]. */
392 static GenOpFunc *gen_op_arithc_T0_T1_cc[3][2] = {
396 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[9][2] = {
398 #ifndef CONFIG_USER_ONLY
404 static const int cc_op_arithb[8] = {
/* cmpxchg helpers, by operand size (and access mode for the mem form). */
415 #define DEF_CMPXCHG(SUFFIX)\
416 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
417 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
418 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,
421 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[3] = {
425 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[9] = {
427 #ifndef CONFIG_USER_ONLY
/* Shift/rotate table body: rol,ror,rcl,rcr,shl,shr,shl,sar.
   Entry 6 repeating shl is intentional: x86 group-2 /6 is the
   undocumented SHL alias (cf. OP_SHL1 above). */
433 #define DEF_SHIFT(SUFFIX)\
435 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
436 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
437 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
438 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
439 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
440 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
441 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
442 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
445 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
446 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
447 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
448 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
449 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
450 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
451 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
452 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
455 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
456 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
457 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
458 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
459 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
460 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
461 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
462 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
465 static GenOpFunc *gen_op_shift_T0_T1_cc[3][8] = {
469 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[9][8] = {
471 #ifndef CONFIG_USER_ONLY
/* shld/shrd double shifts, word/long only; second index selects the
   immediate-count vs ECX-count form. */
477 #define DEF_SHIFTD(SUFFIX, op)\
483 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
484 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
487 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
488 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
492 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[3][2] = {
496 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[3][2] = {
500 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[9][2] = {
502 #ifndef CONFIG_USER_ONLY
503 DEF_SHIFTD(_kernel, im)
504 DEF_SHIFTD(_user, im)
508 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[9][2] = {
509 DEF_SHIFTD(_raw, ECX)
510 #ifndef CONFIG_USER_ONLY
511 DEF_SHIFTD(_kernel, ECX)
512 DEF_SHIFTD(_user, ECX)
/* Bit-test ops bt/bts/btr/btc: [0]=16 bit, [1]=32 bit. */
516 static GenOpFunc *gen_op_btx_T0_T1_cc[2][4] = {
519 gen_op_btsw_T0_T1_cc,
520 gen_op_btrw_T0_T1_cc,
521 gen_op_btcw_T0_T1_cc,
525 gen_op_btsl_T0_T1_cc,
526 gen_op_btrl_T0_T1_cc,
527 gen_op_btcl_T0_T1_cc,
/* Bit-scan (bsf/bsr) helpers, by operand size. */
531 static GenOpFunc *gen_op_bsx_T0_cc[2][2] = {
/* Memory access micro-op tables: [3 * 3] = three operand sizes (b/w/l)
   times three access variants (raw, kernel, user), indexed as
   [ot + mem_index] (see DisasContext.mem_index). */
542 static GenOpFunc *gen_op_lds_T0_A0[3 * 3] = {
543 gen_op_ldsb_raw_T0_A0,
544 gen_op_ldsw_raw_T0_A0,
546 #ifndef CONFIG_USER_ONLY
547 gen_op_ldsb_kernel_T0_A0,
548 gen_op_ldsw_kernel_T0_A0,
551 gen_op_ldsb_user_T0_A0,
552 gen_op_ldsw_user_T0_A0,
557 static GenOpFunc *gen_op_ldu_T0_A0[3 * 3] = {
558 gen_op_ldub_raw_T0_A0,
559 gen_op_lduw_raw_T0_A0,
562 #ifndef CONFIG_USER_ONLY
563 gen_op_ldub_kernel_T0_A0,
564 gen_op_lduw_kernel_T0_A0,
567 gen_op_ldub_user_T0_A0,
568 gen_op_lduw_user_T0_A0,
573 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
574 static GenOpFunc *gen_op_ld_T0_A0[3 * 3] = {
575 gen_op_ldub_raw_T0_A0,
576 gen_op_lduw_raw_T0_A0,
577 gen_op_ldl_raw_T0_A0,
579 #ifndef CONFIG_USER_ONLY
580 gen_op_ldub_kernel_T0_A0,
581 gen_op_lduw_kernel_T0_A0,
582 gen_op_ldl_kernel_T0_A0,
584 gen_op_ldub_user_T0_A0,
585 gen_op_lduw_user_T0_A0,
586 gen_op_ldl_user_T0_A0,
590 static GenOpFunc *gen_op_ld_T1_A0[3 * 3] = {
591 gen_op_ldub_raw_T1_A0,
592 gen_op_lduw_raw_T1_A0,
593 gen_op_ldl_raw_T1_A0,
595 #ifndef CONFIG_USER_ONLY
596 gen_op_ldub_kernel_T1_A0,
597 gen_op_lduw_kernel_T1_A0,
598 gen_op_ldl_kernel_T1_A0,
600 gen_op_ldub_user_T1_A0,
601 gen_op_lduw_user_T1_A0,
602 gen_op_ldl_user_T1_A0,
606 static GenOpFunc *gen_op_st_T0_A0[3 * 3] = {
607 gen_op_stb_raw_T0_A0,
608 gen_op_stw_raw_T0_A0,
609 gen_op_stl_raw_T0_A0,
611 #ifndef CONFIG_USER_ONLY
612 gen_op_stb_kernel_T0_A0,
613 gen_op_stw_kernel_T0_A0,
614 gen_op_stl_kernel_T0_A0,
616 gen_op_stb_user_T0_A0,
617 gen_op_stw_user_T0_A0,
618 gen_op_stl_user_T0_A0,
/* NOTE(review): the T1 store table has no byte entries in view —
   presumably byte stores always go through T0; confirm in op.c. */
622 static GenOpFunc *gen_op_st_T1_A0[3 * 3] = {
624 gen_op_stw_raw_T1_A0,
625 gen_op_stl_raw_T1_A0,
627 #ifndef CONFIG_USER_ONLY
629 gen_op_stw_kernel_T1_A0,
630 gen_op_stl_kernel_T1_A0,
633 gen_op_stw_user_T1_A0,
634 gen_op_stl_user_T1_A0,
/* Load the source-string address into A0: ESI plus the segment base,
   honouring a segment override (s->override) when present; 16 bit
   address size masks with 0xffff and always applies a segment base. */
638 static inline void gen_string_movl_A0_ESI(DisasContext *s)
642 override = s->override;
645 if (s->addseg && override < 0)
648 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
649 gen_op_addl_A0_reg_sN[0][R_ESI]();
651 gen_op_movl_A0_reg[R_ESI]();
654 /* 16 bit address, always override */
657 gen_op_movl_A0_reg[R_ESI]();
658 gen_op_andl_A0_ffff();
659 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));

/* Load the destination-string address into A0: always ES:EDI — the
   destination segment of string ops cannot be overridden. */
663 static inline void gen_string_movl_A0_EDI(DisasContext *s)
667 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
668 gen_op_addl_A0_reg_sN[0][R_EDI]();
670 gen_op_movl_A0_reg[R_EDI]();
673 gen_op_movl_A0_reg[R_EDI]();
674 gen_op_andl_A0_ffff();
675 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* T0 = direction-flag step for the element size; the string emitters
   below add it to ESI/EDI after each element. */
679 static GenOpFunc *gen_op_movl_T0_Dshift[3] = {
680 gen_op_movl_T0_Dshiftb,
681 gen_op_movl_T0_Dshiftw,
682 gen_op_movl_T0_Dshiftl,
/* Jump-if-ECX-zero and ECX decrement, indexed by s->aflag
   (0 = 16 bit CX, 1 = 32 bit ECX). */
685 static GenOpFunc2 *gen_op_jz_ecx[2] = {
690 static GenOpFunc1 *gen_op_jz_ecx_im[2] = {
695 static GenOpFunc *gen_op_dec_ECX[2] = {
/* repz/repnz termination test; with USE_DIRECT_JUMP the helper chains
   directly and takes no tb argument. */
700 #ifdef USE_DIRECT_JUMP
701 typedef GenOpFunc GenOpFuncTB2;
702 #define gen_op_string_jnz_sub(nz, ot, tb) gen_op_string_jnz_sub2[nz][ot]()
704 typedef GenOpFunc1 GenOpFuncTB2;
705 #define gen_op_string_jnz_sub(nz, ot, tb) gen_op_string_jnz_sub2[nz][ot](tb)
708 static GenOpFuncTB2 *gen_op_string_jnz_sub2[2][3] = {
710 gen_op_string_jnz_subb,
711 gen_op_string_jnz_subw,
712 gen_op_string_jnz_subl,
715 gen_op_string_jz_subb,
716 gen_op_string_jz_subw,
717 gen_op_string_jz_subl,
721 static GenOpFunc1 *gen_op_string_jnz_sub_im[2][3] = {
723 gen_op_string_jnz_subb_im,
724 gen_op_string_jnz_subw_im,
725 gen_op_string_jnz_subl_im,
728 gen_op_string_jz_subb_im,
729 gen_op_string_jz_subw_im,
730 gen_op_string_jz_subl_im,
/* I/O port access helpers and permission-check helpers, by operand
   size (used by in/out/ins/outs translation). */
734 static GenOpFunc *gen_op_in_DX_T0[3] = {
740 static GenOpFunc *gen_op_out_DX_T0[3] = {
746 static GenOpFunc *gen_op_in[3] = {
752 static GenOpFunc *gen_op_out[3] = {
758 static GenOpFunc *gen_check_io_T0[3] = {
764 static GenOpFunc *gen_check_io_DX[3] = {
/* Emit an I/O permission check when it can fail (protected mode with
   CPL > IOPL, or vm86).  The cc state and EIP are committed first so
   the runtime helper can raise a fault with correct state. */
770 static void gen_check_io(DisasContext *s, int ot, int use_dx, int cur_eip)
772 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
773 if (s->cc_op != CC_OP_DYNAMIC)
774 gen_op_set_cc_op(s->cc_op);
775 gen_op_jmp_im(cur_eip);
777 gen_check_io_DX[ot]();
779 gen_check_io_T0[ot]();
/* Emit one MOVS element: load from [ESI], store to ES:[EDI], then
   advance ESI/EDI by the direction-flag step (32- or 16-bit index
   arithmetic depending on address size). */
783 static inline void gen_movs(DisasContext *s, int ot)
785 gen_string_movl_A0_ESI(s);
786 gen_op_ld_T0_A0[ot + s->mem_index]();
787 gen_string_movl_A0_EDI(s);
788 gen_op_st_T0_A0[ot + s->mem_index]();
789 gen_op_movl_T0_Dshift[ot]();
791 gen_op_addl_ESI_T0();
792 gen_op_addl_EDI_T0();
794 gen_op_addw_ESI_T0();
795 gen_op_addw_EDI_T0();
799 static inline void gen_update_cc_op(DisasContext *s)
801 if (s->cc_op != CC_OP_DYNAMIC) {
802 gen_op_set_cc_op(s->cc_op);
803 s->cc_op = CC_OP_DYNAMIC;
/* Emit the "skip the string op if ECX == 0" test at the top of a rep
   loop; the chained-TB form is used when block chaining is allowed,
   otherwise the immediate-target form. */
807 static inline void gen_jz_ecx_string(DisasContext *s, unsigned int next_eip)
810 gen_op_jz_ecx[s->aflag]((long)s->tb, next_eip);
812 /* XXX: does not work with gdbstub "ice" single step - not a
814 gen_op_jz_ecx_im[s->aflag](next_eip);
/* STOS: store EAX (T0) to ES:[EDI], advance EDI. */
818 static inline void gen_stos(DisasContext *s, int ot)
820 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
821 gen_string_movl_A0_EDI(s);
822 gen_op_st_T0_A0[ot + s->mem_index]();
823 gen_op_movl_T0_Dshift[ot]();
825 gen_op_addl_EDI_T0();
827 gen_op_addw_EDI_T0();

/* LODS: load [ESI] into EAX, advance ESI. */
831 static inline void gen_lods(DisasContext *s, int ot)
833 gen_string_movl_A0_ESI(s);
834 gen_op_ld_T0_A0[ot + s->mem_index]();
835 gen_op_mov_reg_T0[ot][R_EAX]();
836 gen_op_movl_T0_Dshift[ot]();
838 gen_op_addl_ESI_T0();
840 gen_op_addw_ESI_T0();

/* SCAS: compare EAX with ES:[EDI] (flags only), advance EDI. */
844 static inline void gen_scas(DisasContext *s, int ot)
846 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
847 gen_string_movl_A0_EDI(s);
848 gen_op_ld_T1_A0[ot + s->mem_index]();
849 gen_op_cmpl_T0_T1_cc();
850 gen_op_movl_T0_Dshift[ot]();
852 gen_op_addl_EDI_T0();
854 gen_op_addw_EDI_T0();

/* CMPS: compare [ESI] with ES:[EDI] (flags only), advance both. */
858 static inline void gen_cmps(DisasContext *s, int ot)
860 gen_string_movl_A0_ESI(s);
861 gen_op_ld_T0_A0[ot + s->mem_index]();
862 gen_string_movl_A0_EDI(s);
863 gen_op_ld_T1_A0[ot + s->mem_index]();
864 gen_op_cmpl_T0_T1_cc();
865 gen_op_movl_T0_Dshift[ot]();
867 gen_op_addl_ESI_T0();
868 gen_op_addl_EDI_T0();
870 gen_op_addw_ESI_T0();
871 gen_op_addw_EDI_T0();

/* INS: read port DX into T0, store to ES:[EDI], advance EDI. */
875 static inline void gen_ins(DisasContext *s, int ot)
877 gen_op_in_DX_T0[ot]();
878 gen_string_movl_A0_EDI(s);
879 gen_op_st_T0_A0[ot + s->mem_index]();
880 gen_op_movl_T0_Dshift[ot]();
882 gen_op_addl_EDI_T0();
884 gen_op_addw_EDI_T0();

/* OUTS: load [ESI] into T0, write to port DX, advance ESI. */
888 static inline void gen_outs(DisasContext *s, int ot)
890 gen_string_movl_A0_ESI(s);
891 gen_op_ld_T0_A0[ot + s->mem_index]();
892 gen_op_out_DX_T0[ot]();
893 gen_op_movl_T0_Dshift[ot]();
895 gen_op_addl_ESI_T0();
897 gen_op_addw_ESI_T0();
/* rep-prefixed string op generator: emits the ECX==0 skip test, one
   element of the string op, the ECX decrement, then loops by jumping
   back to cur_eip (one element per executed TB — see the note below
   about single-stepping with ECX == 1). */
901 /* same method as Valgrind : we generate jumps to current or next
903 #define GEN_REPZ(op) \
904 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
905 unsigned int cur_eip, unsigned int next_eip) \
907 gen_update_cc_op(s); \
908 gen_jz_ecx_string(s, next_eip); \
910 gen_op_dec_ECX[s->aflag](); \
911 /* a loop would cause two single step exceptions if ECX = 1 \
912 before rep string_insn */ \
914 gen_op_jz_ecx_im[s->aflag](next_eip); \
915 gen_jmp(s, cur_eip); \
/* Same as GEN_REPZ but for ops that test flags (repz/repnz cmps/scas):
   additionally sets cc_op and emits the ZF termination test. */
918 #define GEN_REPZ2(op) \
919 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
920 unsigned int cur_eip, \
921 unsigned int next_eip, \
924 gen_update_cc_op(s); \
925 gen_jz_ecx_string(s, next_eip); \
927 gen_op_dec_ECX[s->aflag](); \
928 gen_op_set_cc_op(CC_OP_SUBB + ot); \
930 gen_op_string_jnz_sub_im[nz][ot](next_eip); \
932 gen_op_string_jnz_sub(nz, ot, (long)s->tb); \
934 gen_op_jz_ecx_im[s->aflag](next_eip); \
935 gen_jmp(s, cur_eip); \
/* Fast conditional-jump helpers usable when cc_op is a sub/cmp of the
   matching width: [size b/w/l][jcc condition]. */
957 static GenOpFunc3 *gen_jcc_sub[3][8] = {
/* loop/loopz/loopnz/jecxz helpers, indexed by address size. */
989 static GenOpFunc2 *gen_op_loop[2][4] = {
/* Generic setcc via full flag computation, one entry per condition. */
1004 static GenOpFunc *gen_setcc_slow[8] = {
/* Fast setcc helpers for the sub/cmp cc_op case: [size][condition]. */
1015 static GenOpFunc *gen_setcc_sub[3][8] = {
1018 gen_op_setb_T0_subb,
1019 gen_op_setz_T0_subb,
1020 gen_op_setbe_T0_subb,
1021 gen_op_sets_T0_subb,
1023 gen_op_setl_T0_subb,
1024 gen_op_setle_T0_subb,
1028 gen_op_setb_T0_subw,
1029 gen_op_setz_T0_subw,
1030 gen_op_setbe_T0_subw,
1031 gen_op_sets_T0_subw,
1033 gen_op_setl_T0_subw,
1034 gen_op_setle_T0_subw,
1038 gen_op_setb_T0_subl,
1039 gen_op_setz_T0_subl,
1040 gen_op_setbe_T0_subl,
1041 gen_op_sets_T0_subl,
1043 gen_op_setl_T0_subl,
1044 gen_op_setle_T0_subl,
/* FPU arith on ST0/FT0, indexed by the insn's /reg field.  Entries 2
   and 3 (fcom/fcomp) share the compare helper; the fcomp stack pop is
   presumably emitted by the caller — confirm in disas_insn. */
1048 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1049 gen_op_fadd_ST0_FT0,
1050 gen_op_fmul_ST0_FT0,
1051 gen_op_fcom_ST0_FT0,
1052 gen_op_fcom_ST0_FT0,
1053 gen_op_fsub_ST0_FT0,
1054 gen_op_fsubr_ST0_FT0,
1055 gen_op_fdiv_ST0_FT0,
1056 gen_op_fdivr_ST0_FT0,
1059 /* NOTE the exception in "r" op ordering */
1060 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1061 gen_op_fadd_STN_ST0,
1062 gen_op_fmul_STN_ST0,
1065 gen_op_fsubr_STN_ST0,
1066 gen_op_fsub_STN_ST0,
1067 gen_op_fdivr_STN_ST0,
1068 gen_op_fdiv_STN_ST0,
/* Emit one ALU operation 'op' of size 'ot' between operand 'd'
   (register, or memory at A0 when d == OR_TMP0) and T1, writing the
   result back and updating the lazy condition-code state.  adc/sbb go
   through the carry-consuming tables; cmp writes no result. */
1071 /* if d == OR_TMP0, it means memory operand (address in A0) */
1072 static void gen_op(DisasContext *s1, int op, int ot, int d)
1074 GenOpFunc *gen_update_cc;
1077 gen_op_mov_TN_reg[ot][0][d]();
1079 gen_op_ld_T0_A0[ot + s1->mem_index]();
1084 if (s1->cc_op != CC_OP_DYNAMIC)
1085 gen_op_set_cc_op(s1->cc_op);
1087 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1088 gen_op_mov_reg_T0[ot][d]();
1090 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1092 s1->cc_op = CC_OP_DYNAMIC;
1095 gen_op_addl_T0_T1();
1096 s1->cc_op = CC_OP_ADDB + ot;
1097 gen_update_cc = gen_op_update2_cc;
1100 gen_op_subl_T0_T1();
1101 s1->cc_op = CC_OP_SUBB + ot;
1102 gen_update_cc = gen_op_update2_cc;
1108 gen_op_arith_T0_T1_cc[op]();
1109 s1->cc_op = CC_OP_LOGICB + ot;
1110 gen_update_cc = gen_op_update1_cc;
1113 gen_op_cmpl_T0_T1_cc();
1114 s1->cc_op = CC_OP_SUBB + ot;
1115 gen_update_cc = NULL;
1118 if (op != OP_CMPL) {
1120 gen_op_mov_reg_T0[ot][d]();
1122 gen_op_st_T0_A0[ot + s1->mem_index]();
1124 /* the flags update must happen after the memory write (precise
1125 exception support) */
/* Emit inc (c > 0) or dec (c < 0) of size 'ot' on operand 'd'
   (register, or memory at A0 when d == OR_TMP0); the previous cc_op is
   committed first because inc/dec preserve CF. */
1131 /* if d == OR_TMP0, it means memory operand (address in A0) */
1132 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1135 gen_op_mov_TN_reg[ot][0][d]();
1137 gen_op_ld_T0_A0[ot + s1->mem_index]();
1138 if (s1->cc_op != CC_OP_DYNAMIC)
1139 gen_op_set_cc_op(s1->cc_op);
1142 s1->cc_op = CC_OP_INCB + ot;
1145 s1->cc_op = CC_OP_DECB + ot;
1148 gen_op_mov_reg_T0[ot][d]();
1150 gen_op_st_T0_A0[ot + s1->mem_index]();
1151 gen_op_update_inc_cc();
/* Emit shift/rotate 'op' of size 'ot' on operand 'd' with count taken
   from register 's'.  Flags become dynamic: a zero count leaves them
   untouched, which cannot be known at translation time. */
1154 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1157 gen_op_mov_TN_reg[ot][0][d]();
1159 gen_op_ld_T0_A0[ot + s1->mem_index]();
1161 gen_op_mov_TN_reg[ot][1][s]();
1162 /* for zero counts, flags are not updated, so must do it dynamically */
1163 if (s1->cc_op != CC_OP_DYNAMIC)
1164 gen_op_set_cc_op(s1->cc_op);
1167 gen_op_shift_T0_T1_cc[ot][op]();
1169 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1171 gen_op_mov_reg_T0[ot][d]();
1172 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1175 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1177 /* currently not optimized */
1178 gen_op_movl_T1_im(c);
1179 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode a ModRM/SIB effective address (16 or 32 bit forms) into A0,
   reading displacement bytes from the code stream, scaling any SIB
   index, and adding the segment base when required (explicit override,
   or the default segment when addseg demands it). */
1182 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1189 int mod, rm, code, override, must_add_seg;
1191 override = s->override;
1192 must_add_seg = s->addseg;
1195 mod = (modrm >> 6) & 3;
1207 code = ldub_code(s->pc++);
1208 scale = (code >> 6) & 3;
1209 index = (code >> 3) & 7;
1217 disp = ldl_code(s->pc);
1224 disp = (int8_t)ldub_code(s->pc++);
1228 disp = ldl_code(s->pc);
1234 /* for correct popl handling with esp */
1235 if (base == 4 && s->popl_esp_hack)
1236 disp += s->popl_esp_hack;
1237 gen_op_movl_A0_reg[base]();
1239 gen_op_addl_A0_im(disp);
1241 gen_op_movl_A0_im(disp);
1243 /* XXX: index == 4 is always invalid */
1244 if (havesib && (index != 4 || scale != 0)) {
1245 gen_op_addl_A0_reg_sN[scale][index]();
1249 if (base == R_EBP || base == R_ESP)
1254 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* 16 bit addressing forms below. */
1260 disp = lduw_code(s->pc);
1262 gen_op_movl_A0_im(disp);
1263 rm = 0; /* avoid SS override */
1270 disp = (int8_t)ldub_code(s->pc++);
1274 disp = lduw_code(s->pc);
1280 gen_op_movl_A0_reg[R_EBX]();
1281 gen_op_addl_A0_reg_sN[0][R_ESI]();
1284 gen_op_movl_A0_reg[R_EBX]();
1285 gen_op_addl_A0_reg_sN[0][R_EDI]();
1288 gen_op_movl_A0_reg[R_EBP]();
1289 gen_op_addl_A0_reg_sN[0][R_ESI]();
1292 gen_op_movl_A0_reg[R_EBP]();
1293 gen_op_addl_A0_reg_sN[0][R_EDI]();
1296 gen_op_movl_A0_reg[R_ESI]();
1299 gen_op_movl_A0_reg[R_EDI]();
1302 gen_op_movl_A0_reg[R_EBP]();
1306 gen_op_movl_A0_reg[R_EBX]();
1310 gen_op_addl_A0_im(disp);
1311 gen_op_andl_A0_ffff();
/* rm 2, 3, 6 are the EBP-based 16 bit forms, which default to SS. */
1315 if (rm == 2 || rm == 3 || rm == 6)
1320 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Generate a ModRM operand move for 'reg': register<->register when
   mod == 3, otherwise compute the effective address and load/store
   through A0.  is_store selects the direction. */
1330 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1332 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1334 int mod, rm, opreg, disp;
1336 mod = (modrm >> 6) & 3;
1341 gen_op_mov_TN_reg[ot][0][reg]();
1342 gen_op_mov_reg_T0[ot][rm]();
1344 gen_op_mov_TN_reg[ot][0][rm]();
1346 gen_op_mov_reg_T0[ot][reg]();
1349 gen_lea_modrm(s, modrm, &opreg, &disp);
1352 gen_op_mov_TN_reg[ot][0][reg]();
1353 gen_op_st_T0_A0[ot + s->mem_index]();
1355 gen_op_ld_T0_A0[ot + s->mem_index]();
1357 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size 'ot' from the code stream at s->pc,
   advancing the decode pointer. */
1362 static inline uint32_t insn_get(DisasContext *s, int ot)
1368 ret = ldub_code(s->pc);
1372 ret = lduw_code(s->pc);
1377 ret = ldl_code(s->pc);
/* Emit a conditional jump on condition 'b' to 'val', falling through
   to next_eip.  Uses the fast sub/cmp jcc helpers (with TB chaining)
   when the current cc_op allows it, otherwise falls back to the slow
   full-flags setcc path with immediate-target jumps.  Odd 'b' inverts
   the condition (argument order is swapped accordingly). */
1384 static inline void gen_jcc(DisasContext *s, int b, int val, int next_eip)
1386 TranslationBlock *tb;
1391 jcc_op = (b >> 1) & 7;
1395 /* we optimize the cmp/jcc case */
1399 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1402 /* some jumps are easy to compute */
1429 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1432 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1444 if (s->cc_op != CC_OP_DYNAMIC)
1445 gen_op_set_cc_op(s->cc_op);
1448 gen_setcc_slow[jcc_op]();
1454 func((long)tb, val, next_eip);
1456 func((long)tb, next_eip, val);
1460 if (s->cc_op != CC_OP_DYNAMIC) {
1461 gen_op_set_cc_op(s->cc_op);
1462 s->cc_op = CC_OP_DYNAMIC;
1464 gen_setcc_slow[jcc_op]();
1466 gen_op_jcc_im(val, next_eip);
1468 gen_op_jcc_im(next_eip, val);
/* Emit setcc for condition 'b' into T0, mirroring gen_jcc's fast
   sub/cmp path versus the slow full-flags path. */
1474 static void gen_setcc(DisasContext *s, int b)
1480 jcc_op = (b >> 1) & 7;
1482 /* we optimize the cmp/jcc case */
1486 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1491 /* some jumps are easy to compute */
1509 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1512 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1520 if (s->cc_op != CC_OP_DYNAMIC)
1521 gen_op_set_cc_op(s->cc_op);
1522 func = gen_setcc_slow[jcc_op];
/* Load a segment register from T0: in protected (non-vm86) mode the
   full checked load helper is used (it can fault, so cc state and EIP
   are committed first); otherwise the direct vm/real-mode store. */
1531 /* move T0 to seg_reg and compute if the CPU state may change. Never
1532 call this function with seg_reg == R_CS */
1533 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, unsigned int cur_eip)
1535 if (s->pe && !s->vm86) {
1536 /* XXX: optimize by finding processor state dynamically */
1537 if (s->cc_op != CC_OP_DYNAMIC)
1538 gen_op_set_cc_op(s->cc_op);
1539 gen_op_jmp_im(cur_eip);
1540 gen_op_movl_seg_T0(seg_reg);
1542 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1544 /* abort translation because the register may have a non zero base
1545 or because ss32 may change. For R_SS, translation must always
1546 stop as a special handling must be done to disable hardware
1547 interrupts for the next instruction */
1548 if (seg_reg == R_SS || (!s->addseg && seg_reg < R_FS))
/* Add a constant to ESP (32 bit) or SP (16 bit), with specialized
   micro-ops for the common +2/+4 cases. */
1552 static inline void gen_stack_update(DisasContext *s, int addend)
1556 gen_op_addl_ESP_2();
1557 else if (addend == 4)
1558 gen_op_addl_ESP_4();
1560 gen_op_addl_ESP_im(addend);
1563 gen_op_addw_ESP_2();
1564 else if (addend == 4)
1565 gen_op_addw_ESP_4();
1567 gen_op_addw_ESP_im(addend);
/* Push T0: compute the new stack address in A0 (masked to 16 bits for
   16-bit SS), keep the new SP value in T1, store, then write the
   updated ESP/SP back. */
1571 /* generate a push. It depends on ss32, addseg and dflag */
1572 static void gen_push_T0(DisasContext *s)
1574 gen_op_movl_A0_reg[R_ESP]();
1581 gen_op_movl_T1_A0();
1582 gen_op_addl_A0_SS();
1585 gen_op_andl_A0_ffff();
1586 gen_op_movl_T1_A0();
1587 gen_op_addl_A0_SS();
1589 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1590 if (s->ss32 && !s->addseg)
1591 gen_op_movl_ESP_A0();
1593 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();

/* Push T1: same scheme as gen_push_T0 but T1 holds the data, so the
   ESP update goes through gen_stack_update instead. */
1596 /* generate a push. It depends on ss32, addseg and dflag */
1597 /* slower version for T1, only used for call Ev */
1598 static void gen_push_T1(DisasContext *s)
1600 gen_op_movl_A0_reg[R_ESP]();
1607 gen_op_addl_A0_SS();
1610 gen_op_andl_A0_ffff();
1611 gen_op_addl_A0_SS();
1613 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
1615 if (s->ss32 && !s->addseg)
1616 gen_op_movl_ESP_A0();
1618 gen_stack_update(s, (-2) << s->dflag);
/* Pop into T0 without adjusting ESP; the caller completes the pop with
   gen_pop_update() after the value is safely consumed. */
1621 /* two step pop is necessary for precise exceptions */
1622 static void gen_pop_T0(DisasContext *s)
1624 gen_op_movl_A0_reg[R_ESP]();
1627 gen_op_addl_A0_SS();
1629 gen_op_andl_A0_ffff();
1630 gen_op_addl_A0_SS();
1632 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();

/* Second half of a pop: discard the popped slot from the stack. */
1635 static void gen_pop_update(DisasContext *s)
1637 gen_stack_update(s, 2 << s->dflag);

/* Compute the linear stack address SS:ESP into A0 (T1 keeps the raw
   offset). */
1640 static void gen_stack_A0(DisasContext *s)
1642 gen_op_movl_A0_ESP();
1644 gen_op_andl_A0_ffff();
1645 gen_op_movl_T1_A0();
1647 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* PUSHA: store all eight GPRs (EDI first in memory order) below the
   current stack pointer, then commit the new ESP from T1. */
1650 /* NOTE: wrap around in 16 bit not fully handled */
1651 static void gen_pusha(DisasContext *s)
1654 gen_op_movl_A0_ESP();
1655 gen_op_addl_A0_im(-16 << s->dflag);
1657 gen_op_andl_A0_ffff();
1658 gen_op_movl_T1_A0();
1660 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1661 for(i = 0;i < 8; i++) {
1662 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
1663 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
1664 gen_op_addl_A0_im(2 << s->dflag);
1666 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();

/* POPA: reload the eight GPRs (ESP slot is skipped), then commit the
   new ESP from T1. */
1669 /* NOTE: wrap around in 16 bit not fully handled */
1670 static void gen_popa(DisasContext *s)
1673 gen_op_movl_A0_ESP();
1675 gen_op_andl_A0_ffff();
1676 gen_op_movl_T1_A0();
1677 gen_op_addl_T1_im(16 << s->dflag);
1679 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1680 for(i = 0;i < 8; i++) {
1681 /* ESP is not reloaded */
1683 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
1684 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
1686 gen_op_addl_A0_im(2 << s->dflag);
1688 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
/* ENTER: push EBP, copy 'level' previous frame pointers, set the new
   EBP, then reserve esp_addend bytes of locals by adjusting ESP. */
1691 /* NOTE: wrap around in 16 bit not fully handled */
1692 /* XXX: check this */
1693 static void gen_enter(DisasContext *s, int esp_addend, int level)
1695 int ot, level1, addend, opsize;
1697 ot = s->dflag + OT_WORD;
1700 opsize = 2 << s->dflag;
1702 gen_op_movl_A0_ESP();
1703 gen_op_addl_A0_im(-opsize);
1705 gen_op_andl_A0_ffff();
1706 gen_op_movl_T1_A0();
1708 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* push the current EBP */
1710 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
1711 gen_op_st_T0_A0[ot + s->mem_index]();
/* copy the enclosing frames' saved EBPs (nesting levels) */
1714 gen_op_addl_A0_im(-opsize);
1715 gen_op_addl_T0_im(-opsize);
1716 gen_op_st_T0_A0[ot + s->mem_index]();
1718 gen_op_addl_A0_im(-opsize);
1719 gen_op_st_T1_A0[ot + s->mem_index]();
1721 gen_op_mov_reg_T1[ot][R_EBP]();
1722 addend = -esp_addend;
1724 addend -= opsize * (level1 + 1);
1725 gen_op_addl_T1_im(addend);
1726 gen_op_mov_reg_T1[ot][R_ESP]();
/* Raise exception 'trapno' at runtime: commit the lazy cc state and
   EIP first so the fault is delivered with precise state. */
1729 static void gen_exception(DisasContext *s, int trapno, unsigned int cur_eip)
1731 if (s->cc_op != CC_OP_DYNAMIC)
1732 gen_op_set_cc_op(s->cc_op);
1733 gen_op_jmp_im(cur_eip);
1734 gen_op_raise_exception(trapno);

/* Raise a software interrupt; next_eip is the return address pushed by
   the runtime helper. */
1738 /* an interrupt is different from an exception because of the
1739 privilege checks */
1740 static void gen_interrupt(DisasContext *s, int intno,
1741 unsigned int cur_eip, unsigned int next_eip)
1743 if (s->cc_op != CC_OP_DYNAMIC)
1744 gen_op_set_cc_op(s->cc_op);
1745 gen_op_jmp_im(cur_eip);
1746 gen_op_raise_interrupt(intno, next_eip);

/* Stop at a debug breakpoint: commit cc state and EIP. */
1750 static void gen_debug(DisasContext *s, unsigned int cur_eip)
1752 if (s->cc_op != CC_OP_DYNAMIC)
1753 gen_op_set_cc_op(s->cc_op);
1754 gen_op_jmp_im(cur_eip);
/* Generic end of translation block: commit cc state, clear any pending
   interrupt-inhibit flag, and raise the single-step trap when
   "hardware" single stepping is active. */
1759 /* generate a generic end of block. Trace exception is also generated
1761 static void gen_eob(DisasContext *s)
1763 if (s->cc_op != CC_OP_DYNAMIC)
1764 gen_op_set_cc_op(s->cc_op);
1765 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
1766 gen_op_reset_inhibit_irq();
1768 if (s->singlestep_enabled) {
1771 gen_op_raise_exception(EXCP01_SSTP);

/* Direct jump to eip within the same segment, using TB chaining via
   gen_op_jmp with the current TB. */
1779 /* generate a jump to eip. No segment change must happen before as a
1780 direct call to the next block may occur */
1781 static void gen_jmp(DisasContext *s, unsigned int eip)
1783 TranslationBlock *tb = s->tb;
1786 if (s->cc_op != CC_OP_DYNAMIC)
1787 gen_op_set_cc_op(s->cc_op);
1788 gen_op_jmp((long)tb, eip);
1796 /* convert one instruction. s->is_jmp is set if the translation must
1797 be stopped. Return the next pc value */
1798 static uint8_t *disas_insn(DisasContext *s, uint8_t *pc_start)
1800 int b, prefixes, aflag, dflag;
1802 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
1803 unsigned int next_eip;
1811 b = ldub_code(s->pc);
1813 /* check prefixes */
1816 prefixes |= PREFIX_REPZ;
1819 prefixes |= PREFIX_REPNZ;
1822 prefixes |= PREFIX_LOCK;
1843 prefixes |= PREFIX_DATA;
1846 prefixes |= PREFIX_ADR;
1850 if (prefixes & PREFIX_DATA)
1852 if (prefixes & PREFIX_ADR)
1855 s->prefix = prefixes;
1859 /* lock generation */
1860 if (prefixes & PREFIX_LOCK)
1863 /* now check op code */
1867 /**************************/
1868 /* extended op code */
1869 b = ldub_code(s->pc++) | 0x100;
1872 /**************************/
1890 ot = dflag ? OT_LONG : OT_WORD;
1893 case 0: /* OP Ev, Gv */
1894 modrm = ldub_code(s->pc++);
1895 reg = ((modrm >> 3) & 7);
1896 mod = (modrm >> 6) & 3;
1899 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1901 } else if (op == OP_XORL && rm == reg) {
1903 /* xor reg, reg optimisation */
1905 s->cc_op = CC_OP_LOGICB + ot;
1906 gen_op_mov_reg_T0[ot][reg]();
1907 gen_op_update1_cc();
1912 gen_op_mov_TN_reg[ot][1][reg]();
1913 gen_op(s, op, ot, opreg);
1915 case 1: /* OP Gv, Ev */
1916 modrm = ldub_code(s->pc++);
1917 mod = (modrm >> 6) & 3;
1918 reg = ((modrm >> 3) & 7);
1921 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1922 gen_op_ld_T1_A0[ot + s->mem_index]();
1923 } else if (op == OP_XORL && rm == reg) {
1926 gen_op_mov_TN_reg[ot][1][rm]();
1928 gen_op(s, op, ot, reg);
1930 case 2: /* OP A, Iv */
1931 val = insn_get(s, ot);
1932 gen_op_movl_T1_im(val);
1933 gen_op(s, op, ot, OR_EAX);
1939 case 0x80: /* GRP1 */
1948 ot = dflag ? OT_LONG : OT_WORD;
1950 modrm = ldub_code(s->pc++);
1951 mod = (modrm >> 6) & 3;
1953 op = (modrm >> 3) & 7;
1956 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1959 opreg = rm + OR_EAX;
1966 val = insn_get(s, ot);
1969 val = (int8_t)insn_get(s, OT_BYTE);
1972 gen_op_movl_T1_im(val);
1973 gen_op(s, op, ot, opreg);
1977 /**************************/
1978 /* inc, dec, and other misc arith */
1979 case 0x40 ... 0x47: /* inc Gv */
1980 ot = dflag ? OT_LONG : OT_WORD;
1981 gen_inc(s, ot, OR_EAX + (b & 7), 1);
1983 case 0x48 ... 0x4f: /* dec Gv */
1984 ot = dflag ? OT_LONG : OT_WORD;
1985 gen_inc(s, ot, OR_EAX + (b & 7), -1);
1987 case 0xf6: /* GRP3 */
1992 ot = dflag ? OT_LONG : OT_WORD;
1994 modrm = ldub_code(s->pc++);
1995 mod = (modrm >> 6) & 3;
1997 op = (modrm >> 3) & 7;
1999 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2000 gen_op_ld_T0_A0[ot + s->mem_index]();
2002 gen_op_mov_TN_reg[ot][0][rm]();
2007 val = insn_get(s, ot);
2008 gen_op_movl_T1_im(val);
2009 gen_op_testl_T0_T1_cc();
2010 s->cc_op = CC_OP_LOGICB + ot;
2015 gen_op_st_T0_A0[ot + s->mem_index]();
2017 gen_op_mov_reg_T0[ot][rm]();
2023 gen_op_st_T0_A0[ot + s->mem_index]();
2025 gen_op_mov_reg_T0[ot][rm]();
2027 gen_op_update_neg_cc();
2028 s->cc_op = CC_OP_SUBB + ot;
2033 gen_op_mulb_AL_T0();
2034 s->cc_op = CC_OP_MULB;
2037 gen_op_mulw_AX_T0();
2038 s->cc_op = CC_OP_MULW;
2042 gen_op_mull_EAX_T0();
2043 s->cc_op = CC_OP_MULL;
2050 gen_op_imulb_AL_T0();
2051 s->cc_op = CC_OP_MULB;
2054 gen_op_imulw_AX_T0();
2055 s->cc_op = CC_OP_MULW;
2059 gen_op_imull_EAX_T0();
2060 s->cc_op = CC_OP_MULL;
2067 gen_op_divb_AL_T0(pc_start - s->cs_base);
2070 gen_op_divw_AX_T0(pc_start - s->cs_base);
2074 gen_op_divl_EAX_T0(pc_start - s->cs_base);
2081 gen_op_idivb_AL_T0(pc_start - s->cs_base);
2084 gen_op_idivw_AX_T0(pc_start - s->cs_base);
2088 gen_op_idivl_EAX_T0(pc_start - s->cs_base);
2097 case 0xfe: /* GRP4 */
2098 case 0xff: /* GRP5 */
2102 ot = dflag ? OT_LONG : OT_WORD;
2104 modrm = ldub_code(s->pc++);
2105 mod = (modrm >> 6) & 3;
2107 op = (modrm >> 3) & 7;
2108 if (op >= 2 && b == 0xfe) {
2112 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2113 if (op >= 2 && op != 3 && op != 5)
2114 gen_op_ld_T0_A0[ot + s->mem_index]();
2116 gen_op_mov_TN_reg[ot][0][rm]();
2120 case 0: /* inc Ev */
2125 gen_inc(s, ot, opreg, 1);
2127 case 1: /* dec Ev */
2132 gen_inc(s, ot, opreg, -1);
2134 case 2: /* call Ev */
2135 /* XXX: optimize if memory (no 'and' is necessary) */
2137 gen_op_andl_T0_ffff();
2138 next_eip = s->pc - s->cs_base;
2139 gen_op_movl_T1_im(next_eip);
2144 case 3: /* lcall Ev */
2145 gen_op_ld_T1_A0[ot + s->mem_index]();
2146 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2147 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2149 if (s->pe && !s->vm86) {
2150 if (s->cc_op != CC_OP_DYNAMIC)
2151 gen_op_set_cc_op(s->cc_op);
2152 gen_op_jmp_im(pc_start - s->cs_base);
2153 gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
2155 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
2159 case 4: /* jmp Ev */
2161 gen_op_andl_T0_ffff();
2165 case 5: /* ljmp Ev */
2166 gen_op_ld_T1_A0[ot + s->mem_index]();
2167 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2168 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2170 if (s->pe && !s->vm86) {
2171 if (s->cc_op != CC_OP_DYNAMIC)
2172 gen_op_set_cc_op(s->cc_op);
2173 gen_op_jmp_im(pc_start - s->cs_base);
2174 gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
2176 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
2177 gen_op_movl_T0_T1();
2182 case 6: /* push Ev */
2190 case 0x84: /* test Ev, Gv */
2195 ot = dflag ? OT_LONG : OT_WORD;
2197 modrm = ldub_code(s->pc++);
2198 mod = (modrm >> 6) & 3;
2200 reg = (modrm >> 3) & 7;
2202 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2203 gen_op_mov_TN_reg[ot][1][reg + OR_EAX]();
2204 gen_op_testl_T0_T1_cc();
2205 s->cc_op = CC_OP_LOGICB + ot;
2208 case 0xa8: /* test eAX, Iv */
2213 ot = dflag ? OT_LONG : OT_WORD;
2214 val = insn_get(s, ot);
2216 gen_op_mov_TN_reg[ot][0][OR_EAX]();
2217 gen_op_movl_T1_im(val);
2218 gen_op_testl_T0_T1_cc();
2219 s->cc_op = CC_OP_LOGICB + ot;
2222 case 0x98: /* CWDE/CBW */
2224 gen_op_movswl_EAX_AX();
2226 gen_op_movsbw_AX_AL();
2228 case 0x99: /* CDQ/CWD */
2230 gen_op_movslq_EDX_EAX();
2232 gen_op_movswl_DX_AX();
2234 case 0x1af: /* imul Gv, Ev */
2235 case 0x69: /* imul Gv, Ev, I */
2237 ot = dflag ? OT_LONG : OT_WORD;
2238 modrm = ldub_code(s->pc++);
2239 reg = ((modrm >> 3) & 7) + OR_EAX;
2240 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2242 val = insn_get(s, ot);
2243 gen_op_movl_T1_im(val);
2244 } else if (b == 0x6b) {
2245 val = insn_get(s, OT_BYTE);
2246 gen_op_movl_T1_im(val);
2248 gen_op_mov_TN_reg[ot][1][reg]();
2251 if (ot == OT_LONG) {
2252 gen_op_imull_T0_T1();
2254 gen_op_imulw_T0_T1();
2256 gen_op_mov_reg_T0[ot][reg]();
2257 s->cc_op = CC_OP_MULB + ot;
2260 case 0x1c1: /* xadd Ev, Gv */
2264 ot = dflag ? OT_LONG : OT_WORD;
2265 modrm = ldub_code(s->pc++);
2266 reg = (modrm >> 3) & 7;
2267 mod = (modrm >> 6) & 3;
2270 gen_op_mov_TN_reg[ot][0][reg]();
2271 gen_op_mov_TN_reg[ot][1][rm]();
2272 gen_op_addl_T0_T1();
2273 gen_op_mov_reg_T1[ot][reg]();
2274 gen_op_mov_reg_T0[ot][rm]();
2276 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2277 gen_op_mov_TN_reg[ot][0][reg]();
2278 gen_op_ld_T1_A0[ot + s->mem_index]();
2279 gen_op_addl_T0_T1();
2280 gen_op_st_T0_A0[ot + s->mem_index]();
2281 gen_op_mov_reg_T1[ot][reg]();
2283 gen_op_update2_cc();
2284 s->cc_op = CC_OP_ADDB + ot;
2287 case 0x1b1: /* cmpxchg Ev, Gv */
2291 ot = dflag ? OT_LONG : OT_WORD;
2292 modrm = ldub_code(s->pc++);
2293 reg = (modrm >> 3) & 7;
2294 mod = (modrm >> 6) & 3;
2295 gen_op_mov_TN_reg[ot][1][reg]();
2298 gen_op_mov_TN_reg[ot][0][rm]();
2299 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
2300 gen_op_mov_reg_T0[ot][rm]();
2302 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2303 gen_op_ld_T0_A0[ot + s->mem_index]();
2304 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
2306 s->cc_op = CC_OP_SUBB + ot;
2308 case 0x1c7: /* cmpxchg8b */
2309 modrm = ldub_code(s->pc++);
2310 mod = (modrm >> 6) & 3;
2313 if (s->cc_op != CC_OP_DYNAMIC)
2314 gen_op_set_cc_op(s->cc_op);
2315 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2317 s->cc_op = CC_OP_EFLAGS;
2320 /**************************/
2322 case 0x50 ... 0x57: /* push */
2323 gen_op_mov_TN_reg[OT_LONG][0][b & 7]();
2326 case 0x58 ... 0x5f: /* pop */
2327 ot = dflag ? OT_LONG : OT_WORD;
2329 /* NOTE: order is important for pop %sp */
2331 gen_op_mov_reg_T0[ot][b & 7]();
2333 case 0x60: /* pusha */
2336 case 0x61: /* popa */
2339 case 0x68: /* push Iv */
2341 ot = dflag ? OT_LONG : OT_WORD;
2343 val = insn_get(s, ot);
2345 val = (int8_t)insn_get(s, OT_BYTE);
2346 gen_op_movl_T0_im(val);
2349 case 0x8f: /* pop Ev */
2350 ot = dflag ? OT_LONG : OT_WORD;
2351 modrm = ldub_code(s->pc++);
2352 mod = (modrm >> 6) & 3;
2355 /* NOTE: order is important for pop %sp */
2358 gen_op_mov_reg_T0[ot][rm]();
2360 /* NOTE: order is important too for MMU exceptions */
2361 s->popl_esp_hack = 2 << dflag;
2362 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2363 s->popl_esp_hack = 0;
2367 case 0xc8: /* enter */
2370 val = lduw_code(s->pc);
2372 level = ldub_code(s->pc++);
2373 gen_enter(s, val, level);
2376 case 0xc9: /* leave */
2377 /* XXX: exception not precise (ESP is updated before potential exception) */
2379 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2380 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
2382 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
2383 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
2386 ot = dflag ? OT_LONG : OT_WORD;
2387 gen_op_mov_reg_T0[ot][R_EBP]();
2390 case 0x06: /* push es */
2391 case 0x0e: /* push cs */
2392 case 0x16: /* push ss */
2393 case 0x1e: /* push ds */
2394 gen_op_movl_T0_seg(b >> 3);
2397 case 0x1a0: /* push fs */
2398 case 0x1a8: /* push gs */
2399 gen_op_movl_T0_seg((b >> 3) & 7);
2402 case 0x07: /* pop es */
2403 case 0x17: /* pop ss */
2404 case 0x1f: /* pop ds */
2407 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2410 /* if reg == SS, inhibit interrupts/trace. */
2411 /* If several instructions disable interrupts, only the
2413 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
2414 gen_op_set_inhibit_irq();
2418 gen_op_jmp_im(s->pc - s->cs_base);
2422 case 0x1a1: /* pop fs */
2423 case 0x1a9: /* pop gs */
2425 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
2428 gen_op_jmp_im(s->pc - s->cs_base);
2433 /**************************/
2436 case 0x89: /* mov Gv, Ev */
2440 ot = dflag ? OT_LONG : OT_WORD;
2441 modrm = ldub_code(s->pc++);
2442 reg = (modrm >> 3) & 7;
2444 /* generate a generic store */
2445 gen_ldst_modrm(s, modrm, ot, OR_EAX + reg, 1);
2448 case 0xc7: /* mov Ev, Iv */
2452 ot = dflag ? OT_LONG : OT_WORD;
2453 modrm = ldub_code(s->pc++);
2454 mod = (modrm >> 6) & 3;
2456 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2457 val = insn_get(s, ot);
2458 gen_op_movl_T0_im(val);
2460 gen_op_st_T0_A0[ot + s->mem_index]();
2462 gen_op_mov_reg_T0[ot][modrm & 7]();
2465 case 0x8b: /* mov Ev, Gv */
2469 ot = dflag ? OT_LONG : OT_WORD;
2470 modrm = ldub_code(s->pc++);
2471 reg = (modrm >> 3) & 7;
2473 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2474 gen_op_mov_reg_T0[ot][reg]();
2476 case 0x8e: /* mov seg, Gv */
2477 modrm = ldub_code(s->pc++);
2478 reg = (modrm >> 3) & 7;
2479 if (reg >= 6 || reg == R_CS)
2481 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2482 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2484 /* if reg == SS, inhibit interrupts/trace */
2485 /* If several instructions disable interrupts, only the
2487 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
2488 gen_op_set_inhibit_irq();
2492 gen_op_jmp_im(s->pc - s->cs_base);
2496 case 0x8c: /* mov Gv, seg */
2497 modrm = ldub_code(s->pc++);
2498 reg = (modrm >> 3) & 7;
2499 mod = (modrm >> 6) & 3;
2502 gen_op_movl_T0_seg(reg);
2504 if (mod == 3 && dflag)
2506 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2509 case 0x1b6: /* movzbS Gv, Eb */
2510 case 0x1b7: /* movzwS Gv, Eb */
2511 case 0x1be: /* movsbS Gv, Eb */
2512 case 0x1bf: /* movswS Gv, Eb */
2515 /* d_ot is the size of destination */
2516 d_ot = dflag + OT_WORD;
2517 /* ot is the size of source */
2518 ot = (b & 1) + OT_BYTE;
2519 modrm = ldub_code(s->pc++);
2520 reg = ((modrm >> 3) & 7) + OR_EAX;
2521 mod = (modrm >> 6) & 3;
2525 gen_op_mov_TN_reg[ot][0][rm]();
2526 switch(ot | (b & 8)) {
2528 gen_op_movzbl_T0_T0();
2531 gen_op_movsbl_T0_T0();
2534 gen_op_movzwl_T0_T0();
2538 gen_op_movswl_T0_T0();
2541 gen_op_mov_reg_T0[d_ot][reg]();
2543 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2545 gen_op_lds_T0_A0[ot + s->mem_index]();
2547 gen_op_ldu_T0_A0[ot + s->mem_index]();
2549 gen_op_mov_reg_T0[d_ot][reg]();
2554 case 0x8d: /* lea */
2555 ot = dflag ? OT_LONG : OT_WORD;
2556 modrm = ldub_code(s->pc++);
2557 mod = (modrm >> 6) & 3;
2560 reg = (modrm >> 3) & 7;
2561 /* we must ensure that no segment is added */
2565 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2567 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
2570 case 0xa0: /* mov EAX, Ov */
2572 case 0xa2: /* mov Ov, EAX */
2577 ot = dflag ? OT_LONG : OT_WORD;
2579 offset_addr = insn_get(s, OT_LONG);
2581 offset_addr = insn_get(s, OT_WORD);
2582 gen_op_movl_A0_im(offset_addr);
2583 /* handle override */
2585 int override, must_add_seg;
2586 must_add_seg = s->addseg;
2587 if (s->override >= 0) {
2588 override = s->override;
2594 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2598 gen_op_ld_T0_A0[ot + s->mem_index]();
2599 gen_op_mov_reg_T0[ot][R_EAX]();
2601 gen_op_mov_TN_reg[ot][0][R_EAX]();
2602 gen_op_st_T0_A0[ot + s->mem_index]();
2605 case 0xd7: /* xlat */
2606 gen_op_movl_A0_reg[R_EBX]();
2607 gen_op_addl_A0_AL();
2609 gen_op_andl_A0_ffff();
2610 /* handle override */
2612 int override, must_add_seg;
2613 must_add_seg = s->addseg;
2615 if (s->override >= 0) {
2616 override = s->override;
2622 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2625 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
2626 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
2628 case 0xb0 ... 0xb7: /* mov R, Ib */
2629 val = insn_get(s, OT_BYTE);
2630 gen_op_movl_T0_im(val);
2631 gen_op_mov_reg_T0[OT_BYTE][b & 7]();
2633 case 0xb8 ... 0xbf: /* mov R, Iv */
2634 ot = dflag ? OT_LONG : OT_WORD;
2635 val = insn_get(s, ot);
2636 reg = OR_EAX + (b & 7);
2637 gen_op_movl_T0_im(val);
2638 gen_op_mov_reg_T0[ot][reg]();
2641 case 0x91 ... 0x97: /* xchg R, EAX */
2642 ot = dflag ? OT_LONG : OT_WORD;
2647 case 0x87: /* xchg Ev, Gv */
2651 ot = dflag ? OT_LONG : OT_WORD;
2652 modrm = ldub_code(s->pc++);
2653 reg = (modrm >> 3) & 7;
2654 mod = (modrm >> 6) & 3;
2658 gen_op_mov_TN_reg[ot][0][reg]();
2659 gen_op_mov_TN_reg[ot][1][rm]();
2660 gen_op_mov_reg_T0[ot][rm]();
2661 gen_op_mov_reg_T1[ot][reg]();
2663 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2664 gen_op_mov_TN_reg[ot][0][reg]();
2665 /* for xchg, lock is implicit */
2666 if (!(prefixes & PREFIX_LOCK))
2668 gen_op_ld_T1_A0[ot + s->mem_index]();
2669 gen_op_st_T0_A0[ot + s->mem_index]();
2670 if (!(prefixes & PREFIX_LOCK))
2672 gen_op_mov_reg_T1[ot][reg]();
2675 case 0xc4: /* les Gv */
2678 case 0xc5: /* lds Gv */
2681 case 0x1b2: /* lss Gv */
2684 case 0x1b4: /* lfs Gv */
2687 case 0x1b5: /* lgs Gv */
2690 ot = dflag ? OT_LONG : OT_WORD;
2691 modrm = ldub_code(s->pc++);
2692 reg = (modrm >> 3) & 7;
2693 mod = (modrm >> 6) & 3;
2696 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2697 gen_op_ld_T1_A0[ot + s->mem_index]();
2698 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2699 /* load the segment first to handle exceptions properly */
2700 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2701 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
2702 /* then put the data */
2703 gen_op_mov_reg_T1[ot][reg]();
2705 gen_op_jmp_im(s->pc - s->cs_base);
2710 /************************/
2721 ot = dflag ? OT_LONG : OT_WORD;
2723 modrm = ldub_code(s->pc++);
2724 mod = (modrm >> 6) & 3;
2726 op = (modrm >> 3) & 7;
2729 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2732 opreg = rm + OR_EAX;
2737 gen_shift(s, op, ot, opreg, OR_ECX);
2740 shift = ldub_code(s->pc++);
2742 gen_shifti(s, op, ot, opreg, shift);
2757 case 0x1a4: /* shld imm */
2761 case 0x1a5: /* shld cl */
2765 case 0x1ac: /* shrd imm */
2769 case 0x1ad: /* shrd cl */
2773 ot = dflag ? OT_LONG : OT_WORD;
2774 modrm = ldub_code(s->pc++);
2775 mod = (modrm >> 6) & 3;
2777 reg = (modrm >> 3) & 7;
2780 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2781 gen_op_ld_T0_A0[ot + s->mem_index]();
2783 gen_op_mov_TN_reg[ot][0][rm]();
2785 gen_op_mov_TN_reg[ot][1][reg]();
2788 val = ldub_code(s->pc++);
2792 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
2794 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
2795 if (op == 0 && ot != OT_WORD)
2796 s->cc_op = CC_OP_SHLB + ot;
2798 s->cc_op = CC_OP_SARB + ot;
2801 if (s->cc_op != CC_OP_DYNAMIC)
2802 gen_op_set_cc_op(s->cc_op);
2804 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
2806 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
2807 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2810 gen_op_mov_reg_T0[ot][rm]();
2814 /************************/
2817 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
2818 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
2819 /* XXX: what to do if illegal op ? */
2820 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2823 modrm = ldub_code(s->pc++);
2824 mod = (modrm >> 6) & 3;
2826 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2829 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2831 case 0x00 ... 0x07: /* fxxxs */
2832 case 0x10 ... 0x17: /* fixxxl */
2833 case 0x20 ... 0x27: /* fxxxl */
2834 case 0x30 ... 0x37: /* fixxx */
2841 gen_op_flds_FT0_A0();
2844 gen_op_fildl_FT0_A0();
2847 gen_op_fldl_FT0_A0();
2851 gen_op_fild_FT0_A0();
2855 gen_op_fp_arith_ST0_FT0[op1]();
2857 /* fcomp needs pop */
2862 case 0x08: /* flds */
2863 case 0x0a: /* fsts */
2864 case 0x0b: /* fstps */
2865 case 0x18: /* fildl */
2866 case 0x1a: /* fistl */
2867 case 0x1b: /* fistpl */
2868 case 0x28: /* fldl */
2869 case 0x2a: /* fstl */
2870 case 0x2b: /* fstpl */
2871 case 0x38: /* filds */
2872 case 0x3a: /* fists */
2873 case 0x3b: /* fistps */
2879 gen_op_flds_ST0_A0();
2882 gen_op_fildl_ST0_A0();
2885 gen_op_fldl_ST0_A0();
2889 gen_op_fild_ST0_A0();
2896 gen_op_fsts_ST0_A0();
2899 gen_op_fistl_ST0_A0();
2902 gen_op_fstl_ST0_A0();
2906 gen_op_fist_ST0_A0();
2914 case 0x0c: /* fldenv mem */
2915 gen_op_fldenv_A0(s->dflag);
2917 case 0x0d: /* fldcw mem */
2920 case 0x0e: /* fnstenv mem */
2921 gen_op_fnstenv_A0(s->dflag);
2923 case 0x0f: /* fnstcw mem */
2926 case 0x1d: /* fldt mem */
2927 gen_op_fldt_ST0_A0();
2929 case 0x1f: /* fstpt mem */
2930 gen_op_fstt_ST0_A0();
2933 case 0x2c: /* frstor mem */
2934 gen_op_frstor_A0(s->dflag);
2936 case 0x2e: /* fnsave mem */
2937 gen_op_fnsave_A0(s->dflag);
2939 case 0x2f: /* fnstsw mem */
2942 case 0x3c: /* fbld */
2943 gen_op_fbld_ST0_A0();
2945 case 0x3e: /* fbstp */
2946 gen_op_fbst_ST0_A0();
2949 case 0x3d: /* fildll */
2950 gen_op_fildll_ST0_A0();
2952 case 0x3f: /* fistpll */
2953 gen_op_fistll_ST0_A0();
2960 /* register float ops */
2964 case 0x08: /* fld sti */
2966 gen_op_fmov_ST0_STN((opreg + 1) & 7);
2968 case 0x09: /* fxchg sti */
2969 gen_op_fxchg_ST0_STN(opreg);
2971 case 0x0a: /* grp d9/2 */
2979 case 0x0c: /* grp d9/4 */
2989 gen_op_fcom_ST0_FT0();
2998 case 0x0d: /* grp d9/5 */
3007 gen_op_fldl2t_ST0();
3011 gen_op_fldl2e_ST0();
3019 gen_op_fldlg2_ST0();
3023 gen_op_fldln2_ST0();
3034 case 0x0e: /* grp d9/6 */
3045 case 3: /* fpatan */
3048 case 4: /* fxtract */
3051 case 5: /* fprem1 */
3054 case 6: /* fdecstp */
3058 case 7: /* fincstp */
3063 case 0x0f: /* grp d9/7 */
3068 case 1: /* fyl2xp1 */
3074 case 3: /* fsincos */
3077 case 5: /* fscale */
3080 case 4: /* frndint */
3092 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3093 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3094 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3100 gen_op_fp_arith_STN_ST0[op1](opreg);
3104 gen_op_fmov_FT0_STN(opreg);
3105 gen_op_fp_arith_ST0_FT0[op1]();
3109 case 0x02: /* fcom */
3110 gen_op_fmov_FT0_STN(opreg);
3111 gen_op_fcom_ST0_FT0();
3113 case 0x03: /* fcomp */
3114 gen_op_fmov_FT0_STN(opreg);
3115 gen_op_fcom_ST0_FT0();
3118 case 0x15: /* da/5 */
3120 case 1: /* fucompp */
3121 gen_op_fmov_FT0_STN(1);
3122 gen_op_fucom_ST0_FT0();
3132 case 0: /* feni (287 only, just do nop here) */
3134 case 1: /* fdisi (287 only, just do nop here) */
3139 case 3: /* fninit */
3142 case 4: /* fsetpm (287 only, just do nop here) */
3148 case 0x1d: /* fucomi */
3149 if (s->cc_op != CC_OP_DYNAMIC)
3150 gen_op_set_cc_op(s->cc_op);
3151 gen_op_fmov_FT0_STN(opreg);
3152 gen_op_fucomi_ST0_FT0();
3153 s->cc_op = CC_OP_EFLAGS;
3155 case 0x1e: /* fcomi */
3156 if (s->cc_op != CC_OP_DYNAMIC)
3157 gen_op_set_cc_op(s->cc_op);
3158 gen_op_fmov_FT0_STN(opreg);
3159 gen_op_fcomi_ST0_FT0();
3160 s->cc_op = CC_OP_EFLAGS;
3162 case 0x2a: /* fst sti */
3163 gen_op_fmov_STN_ST0(opreg);
3165 case 0x2b: /* fstp sti */
3166 gen_op_fmov_STN_ST0(opreg);
3169 case 0x2c: /* fucom st(i) */
3170 gen_op_fmov_FT0_STN(opreg);
3171 gen_op_fucom_ST0_FT0();
3173 case 0x2d: /* fucomp st(i) */
3174 gen_op_fmov_FT0_STN(opreg);
3175 gen_op_fucom_ST0_FT0();
3178 case 0x33: /* de/3 */
3180 case 1: /* fcompp */
3181 gen_op_fmov_FT0_STN(1);
3182 gen_op_fcom_ST0_FT0();
3190 case 0x3c: /* df/4 */
3193 gen_op_fnstsw_EAX();
3199 case 0x3d: /* fucomip */
3200 if (s->cc_op != CC_OP_DYNAMIC)
3201 gen_op_set_cc_op(s->cc_op);
3202 gen_op_fmov_FT0_STN(opreg);
3203 gen_op_fucomi_ST0_FT0();
3205 s->cc_op = CC_OP_EFLAGS;
3207 case 0x3e: /* fcomip */
3208 if (s->cc_op != CC_OP_DYNAMIC)
3209 gen_op_set_cc_op(s->cc_op);
3210 gen_op_fmov_FT0_STN(opreg);
3211 gen_op_fcomi_ST0_FT0();
3213 s->cc_op = CC_OP_EFLAGS;
3215 case 0x10 ... 0x13: /* fcmovxx */
3219 const static uint8_t fcmov_cc[8] = {
3225 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
3227 gen_op_fcmov_ST0_STN_T0(opreg);
3234 #ifdef USE_CODE_COPY
3235 s->tb->cflags |= CF_TB_FP_USED;
3238 /************************/
3241 case 0xa4: /* movsS */
3246 ot = dflag ? OT_LONG : OT_WORD;
3248 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3249 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3255 case 0xaa: /* stosS */
3260 ot = dflag ? OT_LONG : OT_WORD;
3262 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3263 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3268 case 0xac: /* lodsS */
3273 ot = dflag ? OT_LONG : OT_WORD;
3274 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3275 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3280 case 0xae: /* scasS */
3285 ot = dflag ? OT_LONG : OT_WORD;
3286 if (prefixes & PREFIX_REPNZ) {
3287 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3288 } else if (prefixes & PREFIX_REPZ) {
3289 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3292 s->cc_op = CC_OP_SUBB + ot;
3296 case 0xa6: /* cmpsS */
3301 ot = dflag ? OT_LONG : OT_WORD;
3302 if (prefixes & PREFIX_REPNZ) {
3303 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3304 } else if (prefixes & PREFIX_REPZ) {
3305 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3308 s->cc_op = CC_OP_SUBB + ot;
3311 case 0x6c: /* insS */
3316 ot = dflag ? OT_LONG : OT_WORD;
3317 gen_check_io(s, ot, 1, pc_start - s->cs_base);
3318 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3319 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3324 case 0x6e: /* outsS */
3329 ot = dflag ? OT_LONG : OT_WORD;
3330 gen_check_io(s, ot, 1, pc_start - s->cs_base);
3331 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3332 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3338 /************************/
3345 ot = dflag ? OT_LONG : OT_WORD;
3346 val = ldub_code(s->pc++);
3347 gen_op_movl_T0_im(val);
3348 gen_check_io(s, ot, 0, pc_start - s->cs_base);
3350 gen_op_mov_reg_T1[ot][R_EAX]();
3357 ot = dflag ? OT_LONG : OT_WORD;
3358 val = ldub_code(s->pc++);
3359 gen_op_movl_T0_im(val);
3360 gen_check_io(s, ot, 0, pc_start - s->cs_base);
3361 gen_op_mov_TN_reg[ot][1][R_EAX]();
3369 ot = dflag ? OT_LONG : OT_WORD;
3370 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
3371 gen_op_andl_T0_ffff();
3372 gen_check_io(s, ot, 0, pc_start - s->cs_base);
3374 gen_op_mov_reg_T1[ot][R_EAX]();
3381 ot = dflag ? OT_LONG : OT_WORD;
3382 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
3383 gen_op_andl_T0_ffff();
3384 gen_check_io(s, ot, 0, pc_start - s->cs_base);
3385 gen_op_mov_TN_reg[ot][1][R_EAX]();
3389 /************************/
3391 case 0xc2: /* ret im */
3392 val = ldsw_code(s->pc);
3395 gen_stack_update(s, val + (2 << s->dflag));
3397 gen_op_andl_T0_ffff();
3401 case 0xc3: /* ret */
3405 gen_op_andl_T0_ffff();
3409 case 0xca: /* lret im */
3410 val = ldsw_code(s->pc);
3413 if (s->pe && !s->vm86) {
3414 if (s->cc_op != CC_OP_DYNAMIC)
3415 gen_op_set_cc_op(s->cc_op);
3416 gen_op_jmp_im(pc_start - s->cs_base);
3417 gen_op_lret_protected(s->dflag, val);
3421 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
3423 gen_op_andl_T0_ffff();
3424 /* NOTE: keeping EIP updated is not a problem in case of
3428 gen_op_addl_A0_im(2 << s->dflag);
3429 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
3430 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3431 /* add stack offset */
3432 gen_stack_update(s, val + (4 << s->dflag));
3436 case 0xcb: /* lret */
3439 case 0xcf: /* iret */
3442 gen_op_iret_real(s->dflag);
3443 s->cc_op = CC_OP_EFLAGS;
3444 } else if (s->vm86) {
3446 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3448 gen_op_iret_real(s->dflag);
3449 s->cc_op = CC_OP_EFLAGS;
3452 if (s->cc_op != CC_OP_DYNAMIC)
3453 gen_op_set_cc_op(s->cc_op);
3454 gen_op_jmp_im(pc_start - s->cs_base);
3455 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
3456 s->cc_op = CC_OP_EFLAGS;
3460 case 0xe8: /* call im */
3462 unsigned int next_eip;
3463 ot = dflag ? OT_LONG : OT_WORD;
3464 val = insn_get(s, ot);
3465 next_eip = s->pc - s->cs_base;
3469 gen_op_movl_T0_im(next_eip);
3474 case 0x9a: /* lcall im */
3476 unsigned int selector, offset;
3478 ot = dflag ? OT_LONG : OT_WORD;
3479 offset = insn_get(s, ot);
3480 selector = insn_get(s, OT_WORD);
3482 gen_op_movl_T0_im(selector);
3483 gen_op_movl_T1_im(offset);
3486 case 0xe9: /* jmp */
3487 ot = dflag ? OT_LONG : OT_WORD;
3488 val = insn_get(s, ot);
3489 val += s->pc - s->cs_base;
3494 case 0xea: /* ljmp im */
3496 unsigned int selector, offset;
3498 ot = dflag ? OT_LONG : OT_WORD;
3499 offset = insn_get(s, ot);
3500 selector = insn_get(s, OT_WORD);
3502 gen_op_movl_T0_im(selector);
3503 gen_op_movl_T1_im(offset);
3506 case 0xeb: /* jmp Jb */
3507 val = (int8_t)insn_get(s, OT_BYTE);
3508 val += s->pc - s->cs_base;
3513 case 0x70 ... 0x7f: /* jcc Jb */
3514 val = (int8_t)insn_get(s, OT_BYTE);
3516 case 0x180 ... 0x18f: /* jcc Jv */
3518 val = insn_get(s, OT_LONG);
3520 val = (int16_t)insn_get(s, OT_WORD);
3523 next_eip = s->pc - s->cs_base;
3527 gen_jcc(s, b, val, next_eip);
3530 case 0x190 ... 0x19f: /* setcc Gv */
3531 modrm = ldub_code(s->pc++);
3533 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
3535 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3536 ot = dflag ? OT_LONG : OT_WORD;
3537 modrm = ldub_code(s->pc++);
3538 reg = (modrm >> 3) & 7;
3539 mod = (modrm >> 6) & 3;
3542 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3543 gen_op_ld_T1_A0[ot + s->mem_index]();
3546 gen_op_mov_TN_reg[ot][1][rm]();
3548 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
3551 /************************/
3553 case 0x9c: /* pushf */
3554 if (s->vm86 && s->iopl != 3) {
3555 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3557 if (s->cc_op != CC_OP_DYNAMIC)
3558 gen_op_set_cc_op(s->cc_op);
3559 gen_op_movl_T0_eflags();
3563 case 0x9d: /* popf */
3564 if (s->vm86 && s->iopl != 3) {
3565 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3570 gen_op_movl_eflags_T0_cpl0();
3572 gen_op_movw_eflags_T0_cpl0();
3575 if (s->cpl <= s->iopl) {
3577 gen_op_movl_eflags_T0_io();
3579 gen_op_movw_eflags_T0_io();
3583 gen_op_movl_eflags_T0();
3585 gen_op_movw_eflags_T0();
3590 s->cc_op = CC_OP_EFLAGS;
3591 /* abort translation because TF flag may change */
3592 gen_op_jmp_im(s->pc - s->cs_base);
3596 case 0x9e: /* sahf */
3597 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
3598 if (s->cc_op != CC_OP_DYNAMIC)
3599 gen_op_set_cc_op(s->cc_op);
3600 gen_op_movb_eflags_T0();
3601 s->cc_op = CC_OP_EFLAGS;
3603 case 0x9f: /* lahf */
3604 if (s->cc_op != CC_OP_DYNAMIC)
3605 gen_op_set_cc_op(s->cc_op);
3606 gen_op_movl_T0_eflags();
3607 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
3609 case 0xf5: /* cmc */
3610 if (s->cc_op != CC_OP_DYNAMIC)
3611 gen_op_set_cc_op(s->cc_op);
3613 s->cc_op = CC_OP_EFLAGS;
3615 case 0xf8: /* clc */
3616 if (s->cc_op != CC_OP_DYNAMIC)
3617 gen_op_set_cc_op(s->cc_op);
3619 s->cc_op = CC_OP_EFLAGS;
3621 case 0xf9: /* stc */
3622 if (s->cc_op != CC_OP_DYNAMIC)
3623 gen_op_set_cc_op(s->cc_op);
3625 s->cc_op = CC_OP_EFLAGS;
3627 case 0xfc: /* cld */
3630 case 0xfd: /* std */
3634 /************************/
3635 /* bit operations */
3636 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3637 ot = dflag ? OT_LONG : OT_WORD;
3638 modrm = ldub_code(s->pc++);
3639 op = (modrm >> 3) & 7;
3640 mod = (modrm >> 6) & 3;
3643 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3644 gen_op_ld_T0_A0[ot + s->mem_index]();
3646 gen_op_mov_TN_reg[ot][0][rm]();
3649 val = ldub_code(s->pc++);
3650 gen_op_movl_T1_im(val);
3654 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3655 s->cc_op = CC_OP_SARB + ot;
3658 gen_op_st_T0_A0[ot + s->mem_index]();
3660 gen_op_mov_reg_T0[ot][rm]();
3661 gen_op_update_bt_cc();
3664 case 0x1a3: /* bt Gv, Ev */
3667 case 0x1ab: /* bts */
3670 case 0x1b3: /* btr */
3673 case 0x1bb: /* btc */
3676 ot = dflag ? OT_LONG : OT_WORD;
3677 modrm = ldub_code(s->pc++);
3678 reg = (modrm >> 3) & 7;
3679 mod = (modrm >> 6) & 3;
3681 gen_op_mov_TN_reg[OT_LONG][1][reg]();
3683 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3684 /* specific case: we need to add a displacement */
3686 gen_op_add_bitw_A0_T1();
3688 gen_op_add_bitl_A0_T1();
3689 gen_op_ld_T0_A0[ot + s->mem_index]();
3691 gen_op_mov_TN_reg[ot][0][rm]();
3693 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3694 s->cc_op = CC_OP_SARB + ot;
3697 gen_op_st_T0_A0[ot + s->mem_index]();
3699 gen_op_mov_reg_T0[ot][rm]();
3700 gen_op_update_bt_cc();
3703 case 0x1bc: /* bsf */
3704 case 0x1bd: /* bsr */
3705 ot = dflag ? OT_LONG : OT_WORD;
3706 modrm = ldub_code(s->pc++);
3707 reg = (modrm >> 3) & 7;
3708 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3709 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
3710 /* NOTE: we always write back the result. Intel doc says it is
3711 undefined if T0 == 0 */
3712 gen_op_mov_reg_T0[ot][reg]();
3713 s->cc_op = CC_OP_LOGICB + ot;
3715 /************************/
3717 case 0x27: /* daa */
3718 if (s->cc_op != CC_OP_DYNAMIC)
3719 gen_op_set_cc_op(s->cc_op);
3721 s->cc_op = CC_OP_EFLAGS;
3723 case 0x2f: /* das */
3724 if (s->cc_op != CC_OP_DYNAMIC)
3725 gen_op_set_cc_op(s->cc_op);
3727 s->cc_op = CC_OP_EFLAGS;
3729 case 0x37: /* aaa */
3730 if (s->cc_op != CC_OP_DYNAMIC)
3731 gen_op_set_cc_op(s->cc_op);
3733 s->cc_op = CC_OP_EFLAGS;
3735 case 0x3f: /* aas */
3736 if (s->cc_op != CC_OP_DYNAMIC)
3737 gen_op_set_cc_op(s->cc_op);
3739 s->cc_op = CC_OP_EFLAGS;
3741 case 0xd4: /* aam */
3742 val = ldub_code(s->pc++);
3744 s->cc_op = CC_OP_LOGICB;
3746 case 0xd5: /* aad */
3747 val = ldub_code(s->pc++);
3749 s->cc_op = CC_OP_LOGICB;
3751 /************************/
3753 case 0x90: /* nop */
3754 /* XXX: correct lock test for all insn */
3755 if (prefixes & PREFIX_LOCK)
3758 case 0x9b: /* fwait */
3759 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
3760 (HF_MP_MASK | HF_TS_MASK)) {
3761 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3764 case 0xcc: /* int3 */
3765 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
3767 case 0xcd: /* int N */
3768 val = ldub_code(s->pc++);
3769 if (s->vm86 && s->iopl != 3) {
3770 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3772 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
3775 case 0xce: /* into */
3776 if (s->cc_op != CC_OP_DYNAMIC)
3777 gen_op_set_cc_op(s->cc_op);
3778 gen_op_into(s->pc - s->cs_base);
3780 case 0xf1: /* icebp (undocumented, exits to external debugger) */
3781 gen_debug(s, pc_start - s->cs_base);
3783 case 0xfa: /* cli */
3785 if (s->cpl <= s->iopl) {
3788 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3794 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3798 case 0xfb: /* sti */
3800 if (s->cpl <= s->iopl) {
3803 /* interruptions are enabled only the first insn after sti */
3804 /* If several instructions disable interrupts, only the
3806 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3807 gen_op_set_inhibit_irq();
3808 /* give a chance to handle pending irqs */
3809 gen_op_jmp_im(s->pc - s->cs_base);
3812 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3818 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3822 case 0x62: /* bound */
3823 ot = dflag ? OT_LONG : OT_WORD;
3824 modrm = ldub_code(s->pc++);
3825 reg = (modrm >> 3) & 7;
3826 mod = (modrm >> 6) & 3;
3829 gen_op_mov_reg_T0[ot][reg]();
3830 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3832 gen_op_boundw(pc_start - s->cs_base);
3834 gen_op_boundl(pc_start - s->cs_base);
3836 case 0x1c8 ... 0x1cf: /* bswap reg */
3838 gen_op_mov_TN_reg[OT_LONG][0][reg]();
3840 gen_op_mov_reg_T0[OT_LONG][reg]();
3842 case 0xd6: /* salc */
3843 if (s->cc_op != CC_OP_DYNAMIC)
3844 gen_op_set_cc_op(s->cc_op);
3847 case 0xe0: /* loopnz */
3848 case 0xe1: /* loopz */
3849 if (s->cc_op != CC_OP_DYNAMIC)
3850 gen_op_set_cc_op(s->cc_op);
3852 case 0xe2: /* loop */
3853 case 0xe3: /* jecxz */
3854 val = (int8_t)insn_get(s, OT_BYTE);
3855 next_eip = s->pc - s->cs_base;
3859 gen_op_loop[s->aflag][b & 3](val, next_eip);
3862 case 0x130: /* wrmsr */
3863 case 0x132: /* rdmsr */
3865 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3873 case 0x131: /* rdtsc */
3876 case 0x1a2: /* cpuid */
3879 case 0xf4: /* hlt */
3881 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3883 if (s->cc_op != CC_OP_DYNAMIC)
3884 gen_op_set_cc_op(s->cc_op);
3885 gen_op_jmp_im(s->pc - s->cs_base);
3891 modrm = ldub_code(s->pc++);
3892 mod = (modrm >> 6) & 3;
3893 op = (modrm >> 3) & 7;
3896 if (!s->pe || s->vm86)
3898 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
3902 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3905 if (!s->pe || s->vm86)
3908 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3910 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3911 gen_op_jmp_im(pc_start - s->cs_base);
3916 if (!s->pe || s->vm86)
3918 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
3922 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3925 if (!s->pe || s->vm86)
3928 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3930 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3931 gen_op_jmp_im(pc_start - s->cs_base);
3937 if (!s->pe || s->vm86)
3939 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3940 if (s->cc_op != CC_OP_DYNAMIC)
3941 gen_op_set_cc_op(s->cc_op);
3946 s->cc_op = CC_OP_EFLAGS;
3953 modrm = ldub_code(s->pc++);
3954 mod = (modrm >> 6) & 3;
3955 op = (modrm >> 3) & 7;
3961 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3963 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
3965 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
3966 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
3967 gen_op_addl_A0_im(2);
3969 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.base));
3971 gen_op_movl_T0_env(offsetof(CPUX86State,idt.base));
3973 gen_op_andl_T0_im(0xffffff);
3974 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
3981 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3983 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3984 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
3985 gen_op_addl_A0_im(2);
3986 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3988 gen_op_andl_T0_im(0xffffff);
3990 gen_op_movl_env_T0(offsetof(CPUX86State,gdt.base));
3991 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
3993 gen_op_movl_env_T0(offsetof(CPUX86State,idt.base));
3994 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
3999 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
4000 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
4004 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4006 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4008 gen_op_jmp_im(s->pc - s->cs_base);
4012 case 7: /* invlpg */
4014 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4018 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4020 gen_op_jmp_im(s->pc - s->cs_base);
4028 case 0x108: /* invd */
4029 case 0x109: /* wbinvd */
4031 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4036 case 0x63: /* arpl */
4037 if (!s->pe || s->vm86)
4039 ot = dflag ? OT_LONG : OT_WORD;
4040 modrm = ldub_code(s->pc++);
4041 reg = (modrm >> 3) & 7;
4042 mod = (modrm >> 6) & 3;
4045 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4046 gen_op_ld_T0_A0[ot + s->mem_index]();
4048 gen_op_mov_TN_reg[ot][0][rm]();
4050 if (s->cc_op != CC_OP_DYNAMIC)
4051 gen_op_set_cc_op(s->cc_op);
4053 s->cc_op = CC_OP_EFLAGS;
4055 gen_op_st_T0_A0[ot + s->mem_index]();
4057 gen_op_mov_reg_T0[ot][rm]();
4059 gen_op_arpl_update();
4061 case 0x102: /* lar */
4062 case 0x103: /* lsl */
4063 if (!s->pe || s->vm86)
4065 ot = dflag ? OT_LONG : OT_WORD;
4066 modrm = ldub_code(s->pc++);
4067 reg = (modrm >> 3) & 7;
4068 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4069 gen_op_mov_TN_reg[ot][1][reg]();
4070 if (s->cc_op != CC_OP_DYNAMIC)
4071 gen_op_set_cc_op(s->cc_op);
4076 s->cc_op = CC_OP_EFLAGS;
4077 gen_op_mov_reg_T1[ot][reg]();
4080 modrm = ldub_code(s->pc++);
4081 mod = (modrm >> 6) & 3;
4082 op = (modrm >> 3) & 7;
4084 case 0: /* prefetchnta */
4085 case 1: /* prefetchnt0 */
4086 case 2: /* prefetchnt0 */
4087 case 3: /* prefetchnt0 */
4090 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4091 /* nothing more to do */
4097 case 0x120: /* mov reg, crN */
4098 case 0x122: /* mov crN, reg */
4100 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4102 modrm = ldub_code(s->pc++);
4103 if ((modrm & 0xc0) != 0xc0)
4106 reg = (modrm >> 3) & 7;
4113 gen_op_mov_TN_reg[OT_LONG][0][rm]();
4114 gen_op_movl_crN_T0(reg);
4115 gen_op_jmp_im(s->pc - s->cs_base);
4118 gen_op_movl_T0_env(offsetof(CPUX86State,cr[reg]));
4119 gen_op_mov_reg_T0[OT_LONG][rm]();
4127 case 0x121: /* mov reg, drN */
4128 case 0x123: /* mov drN, reg */
4130 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4132 modrm = ldub_code(s->pc++);
4133 if ((modrm & 0xc0) != 0xc0)
4136 reg = (modrm >> 3) & 7;
4137 /* XXX: do it dynamically with CR4.DE bit */
4138 if (reg == 4 || reg == 5)
4141 gen_op_mov_TN_reg[OT_LONG][0][rm]();
4142 gen_op_movl_drN_T0(reg);
4143 gen_op_jmp_im(s->pc - s->cs_base);
4146 gen_op_movl_T0_env(offsetof(CPUX86State,dr[reg]));
4147 gen_op_mov_reg_T0[OT_LONG][rm]();
4151 case 0x106: /* clts */
4153 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4156 /* abort block because static cpu state changed */
4157 gen_op_jmp_im(s->pc - s->cs_base);
4164 /* lock generation */
4165 if (s->prefix & PREFIX_LOCK)
4169 if (s->prefix & PREFIX_LOCK)
4171 /* XXX: ensure that no lock was generated */
4172 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4176 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
4177 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
4179 /* flags read by an operation */
/* opc_read_flags[op] is the set of x86 condition-code bits (CC_*) that
   micro-op 'op' consumes.  optimize_flags() walks the generated op buffer
   backwards and uses this table to compute flag liveness; an op absent
   from the table is assumed to read no flags.
   NOTE(review): this listing is truncated -- the DEF_READF() invocations
   and the closing '};' of the initializer are not visible here. */
4180 static uint16_t opc_read_flags[NB_OPS] = {
4181 [INDEX_op_aas] = CC_A,
4182 [INDEX_op_aaa] = CC_A,
4183 [INDEX_op_das] = CC_A | CC_C,
4184 [INDEX_op_daa] = CC_A | CC_C,
4186 /* subtle: due to the incl/decl implementation, C is used */
4187 [INDEX_op_update_inc_cc] = CC_C,
4189 [INDEX_op_into] = CC_O,
4191 [INDEX_op_jb_subb] = CC_C,
4192 [INDEX_op_jb_subw] = CC_C,
4193 [INDEX_op_jb_subl] = CC_C,
4195 [INDEX_op_jz_subb] = CC_Z,
4196 [INDEX_op_jz_subw] = CC_Z,
4197 [INDEX_op_jz_subl] = CC_Z,
4199 [INDEX_op_jbe_subb] = CC_Z | CC_C,
4200 [INDEX_op_jbe_subw] = CC_Z | CC_C,
4201 [INDEX_op_jbe_subl] = CC_Z | CC_C,
4203 [INDEX_op_js_subb] = CC_S,
4204 [INDEX_op_js_subw] = CC_S,
4205 [INDEX_op_js_subl] = CC_S,
4207 [INDEX_op_jl_subb] = CC_O | CC_S,
4208 [INDEX_op_jl_subw] = CC_O | CC_S,
4209 [INDEX_op_jl_subl] = CC_O | CC_S,
4211 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
4212 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
4213 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
4215 [INDEX_op_loopnzw] = CC_Z,
4216 [INDEX_op_loopnzl] = CC_Z,
4217 [INDEX_op_loopzw] = CC_Z,
4218 [INDEX_op_loopzl] = CC_Z,
4220 [INDEX_op_seto_T0_cc] = CC_O,
4221 [INDEX_op_setb_T0_cc] = CC_C,
4222 [INDEX_op_setz_T0_cc] = CC_Z,
4223 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
4224 [INDEX_op_sets_T0_cc] = CC_S,
4225 [INDEX_op_setp_T0_cc] = CC_P,
4226 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
4227 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
4229 [INDEX_op_setb_T0_subb] = CC_C,
4230 [INDEX_op_setb_T0_subw] = CC_C,
4231 [INDEX_op_setb_T0_subl] = CC_C,
4233 [INDEX_op_setz_T0_subb] = CC_Z,
4234 [INDEX_op_setz_T0_subw] = CC_Z,
4235 [INDEX_op_setz_T0_subl] = CC_Z,
4237 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
4238 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
4239 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
4241 [INDEX_op_sets_T0_subb] = CC_S,
4242 [INDEX_op_sets_T0_subw] = CC_S,
4243 [INDEX_op_sets_T0_subl] = CC_S,
4245 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
4246 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
4247 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
4249 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
4250 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
4251 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
4253 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
4254 [INDEX_op_cmc] = CC_C,
4255 [INDEX_op_salc] = CC_C,
4257 /* needed for correct flag optimisation before string ops */
4258 [INDEX_op_jz_ecxw] = CC_OSZAPC,
4259 [INDEX_op_jz_ecxl] = CC_OSZAPC,
4260 [INDEX_op_jz_ecxw_im] = CC_OSZAPC,
4261 [INDEX_op_jz_ecxl_im] = CC_OSZAPC,
/* adc/sbb and rotate-through-carry ops consume the incoming carry flag;
   the macro instantiates one entry per memory-access variant (SUFFIX). */
4263 #define DEF_READF(SUFFIX)\
4264 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4265 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4266 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
4267 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4268 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4269 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
4271 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4272 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4273 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
4274 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4275 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4276 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,
4281 #ifndef CONFIG_USER_ONLY
4287 /* flags written by an operation */
/* opc_write_flags[op] is the set of condition-code bits (CC_*) that
   micro-op 'op' produces.  optimize_flags() kills these bits from the
   live set when scanning backwards; if none of an op's written flags
   are live, the op may be replaced by its opc_simpler[] form.
   NOTE(review): listing truncated -- the DEF_WRITEF() invocations and
   the closing '};' are not visible here. */
4288 static uint16_t opc_write_flags[NB_OPS] = {
4289 [INDEX_op_update2_cc] = CC_OSZAPC,
4290 [INDEX_op_update1_cc] = CC_OSZAPC,
4291 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
4292 [INDEX_op_update_neg_cc] = CC_OSZAPC,
4293 /* subtle: due to the incl/decl implementation, C is used */
4294 [INDEX_op_update_inc_cc] = CC_OSZAPC,
4295 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
4297 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
4298 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
4299 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
4300 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
4301 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
4302 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
4303 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
4304 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
4307 [INDEX_op_aam] = CC_OSZAPC,
4308 [INDEX_op_aad] = CC_OSZAPC,
4309 [INDEX_op_aas] = CC_OSZAPC,
4310 [INDEX_op_aaa] = CC_OSZAPC,
4311 [INDEX_op_das] = CC_OSZAPC,
4312 [INDEX_op_daa] = CC_OSZAPC,
/* note: the byte form of "mov eflags" cannot touch O (it is outside the
   low 8 bits of EFLAGS), hence the narrower mask on the next entry */
4314 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
4315 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
4316 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
4317 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
4318 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
4319 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
4320 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
4321 [INDEX_op_clc] = CC_C,
4322 [INDEX_op_stc] = CC_C,
4323 [INDEX_op_cmc] = CC_C,
4325 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
4326 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
4327 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
4328 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
4329 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
4330 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
4331 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
4332 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
4334 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
4335 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
4336 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
4337 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
4339 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
4340 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
4341 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
4343 [INDEX_op_cmpxchg8b] = CC_Z,
4344 [INDEX_op_lar] = CC_Z,
4345 [INDEX_op_lsl] = CC_Z,
4346 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
4347 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
/* flag-writing entries shared by all memory-access variants (SUFFIX) */
4349 #define DEF_WRITEF(SUFFIX)\
4350 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4351 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4352 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4353 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4354 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4355 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4357 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4358 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4359 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4360 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4361 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4362 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4364 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4365 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4366 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4367 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4368 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4369 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4371 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4372 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4373 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4375 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4376 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4377 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4379 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4380 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4381 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4383 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4384 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4385 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4386 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4388 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4389 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4390 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4391 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4393 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
4394 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
4395 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,
4400 #ifndef CONFIG_USER_ONLY
4406 /* simpler form of an operation if no flags need to be generated */
/* opc_simpler[op] is the replacement micro-op used when optimize_flags()
   determines that none of op's written flags are live: flag-only update
   ops become nops, and flag-producing shifts/rotates become their
   non-flag variants.  Unlisted ops get themselves as default, filled in
   by optimize_flags_init().
   NOTE(review): listing truncated -- the base DEF_SIMPLER() invocation,
   the remaining memory-variant invocations and the closing '};' are not
   visible here. */
4407 static uint16_t opc_simpler[NB_OPS] = {
4408 [INDEX_op_update2_cc] = INDEX_op_nop,
4409 [INDEX_op_update1_cc] = INDEX_op_nop,
4410 [INDEX_op_update_neg_cc] = INDEX_op_nop,
4412 /* broken: CC_OP logic must be rewritten */
4413 [INDEX_op_update_inc_cc] = INDEX_op_nop,
4416 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
4417 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
4418 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
4420 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
4421 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
4422 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
4424 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
4425 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
4426 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
/* rotate entries shared by all memory-access variants (SUFFIX) */
4428 #define DEF_SIMPLER(SUFFIX)\
4429 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
4430 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
4431 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
4433 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
4434 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
4435 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,
4439 #ifndef CONFIG_USER_ONLY
4440 DEF_SIMPLER(_kernel)
/* One-time initialisation for the flag optimiser: walks the opc_simpler[]
   table and gives every micro-op without an explicit "simpler" replacement
   a default entry, so optimize_flags() can index the table unconditionally.
   NOTE(review): listing truncated -- the opening brace, the declaration of
   'i', the loop body that stores the default value, and the closing braces
   are not visible here. */
4445 void optimize_flags_init(void)
4448 /* put default values in arrays */
4449 for(i = 0; i < NB_OPS; i++) {
4450 if (opc_simpler[i] == 0)
4455 /* CPU flags computation optimization: we move backward thru the
4456 generated code to see which flags are needed. The operation is
4457 modified if suitable */
/* Backward liveness scan over a finished micro-op buffer of length
   'opc_buf_len'.  Uses opc_read_flags[] / opc_write_flags[] to track
   which CC_* bits are still needed, and rewrites an op in place with its
   opc_simpler[] form when none of the flags it writes are live.
   NOTE(review): listing truncated -- the declaration of 'opc_ptr' and the
   'op = *--opc_ptr;' fetch inside the loop are not visible here. */
4458 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
4461 int live_flags, write_flags, op;
4463 opc_ptr = opc_buf + opc_buf_len;
4464 /* live_flags contains the flags needed by the instructions that
4465 follow in the code. At the end of the block, conservatively assume
all flags are live. */
4467 live_flags = CC_OSZAPC;
4468 while (opc_ptr > opc_buf) {
4470 /* if none of the flags written by the instruction is used,
4471 then we can try to find a simpler instruction */
4472 write_flags = opc_write_flags[op];
4473 if ((live_flags & write_flags) == 0) {
4474 *opc_ptr = opc_simpler[op];
4476 /* compute the live flags before the instruction */
4477 live_flags &= ~write_flags;
4478 live_flags |= opc_read_flags[op];
4482 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
4483 basic block 'tb'. If search_pc is TRUE, also generate PC
4484 information for each intermediate instruction. */
/* Core translation loop: decodes guest instructions with disas_insn()
   until a jump, a CPU-state change, single-step/IRQ-inhibit conditions,
   or buffer/page limits stop the block, then runs optimize_flags() over
   the result and records the block size in tb->size.
   NOTE(review): listing truncated -- several declarations, closing braces
   and the final return are not visible here. */
4485 static inline int gen_intermediate_code_internal(CPUState *env,
4486 TranslationBlock *tb,
4489 DisasContext dc1, *dc = &dc1;
4491 uint16_t *gen_opc_end;
4496 /* generate intermediate code */
4497 pc_start = (uint8_t *)tb->pc;
4498 cs_base = (uint8_t *)tb->cs_base;
/* unpack the static translation-relevant CPU state from tb's flags */
4501 dc->pe = (flags >> HF_PE_SHIFT) & 1;
4502 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
4503 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
4504 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
4506 dc->vm86 = (flags >> VM_SHIFT) & 1;
4507 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
4508 dc->iopl = (flags >> IOPL_SHIFT) & 3;
4509 dc->tf = (flags >> TF_SHIFT) & 1;
4510 dc->singlestep_enabled = env->singlestep_enabled;
4511 dc->cc_op = CC_OP_DYNAMIC;
4512 dc->cs_base = cs_base;
4514 dc->popl_esp_hack = 0;
4515 /* select memory access functions */
4517 if (flags & HF_SOFTMMU_MASK) {
/* direct block chaining is only safe when nothing can interrupt or
   single-step the block */
4524 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
4525 (flags & HF_INHIBIT_IRQ_MASK)
4526 #ifndef CONFIG_SOFTMMU
4527 || (flags & HF_SOFTMMU_MASK)
4531 /* check addseg logic */
4532 if (!dc->addseg && (dc->vm86 || !dc->pe))
4533 printf("ERROR addseg\n");
4536 gen_opc_ptr = gen_opc_buf;
4537 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4538 gen_opparam_ptr = gen_opparam_buf;
4540 dc->is_jmp = DISAS_NEXT;
/* emit a debug exception at any guest breakpoint address */
4545 if (env->nb_breakpoints > 0) {
4546 for(j = 0; j < env->nb_breakpoints; j++) {
4547 if (env->breakpoints[j] == (unsigned long)pc_ptr) {
4548 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc mode: record op-buffer offset -> guest PC / cc_op mapping */
4554 j = gen_opc_ptr - gen_opc_buf;
4558 gen_opc_instr_start[lj++] = 0;
4560 gen_opc_pc[lj] = (uint32_t)pc_ptr;
4561 gen_opc_cc_op[lj] = dc->cc_op;
4562 gen_opc_instr_start[lj] = 1;
4564 pc_ptr = disas_insn(dc, pc_ptr);
4565 /* stop translation if indicated */
4568 /* if single step mode, we generate only one instruction and
4569 generate an exception */
4570 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
4571 the flag and abort the translation to give the irqs a
4572 chance to happen */
4573 if (dc->tf || dc->singlestep_enabled ||
4574 (flags & HF_INHIBIT_IRQ_MASK)) {
4575 gen_op_jmp_im(pc_ptr - dc->cs_base);
4579 /* if too long translation, stop generation too */
4580 if (gen_opc_ptr >= gen_opc_end ||
4581 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
4582 gen_op_jmp_im(pc_ptr - dc->cs_base);
4587 *gen_opc_ptr = INDEX_op_end;
4588 /* we don't forget to fill the last values */
4590 j = gen_opc_ptr - gen_opc_buf;
4593 gen_opc_instr_start[lj++] = 0;
/* optional tracing of the guest code and generated micro-ops */
4597 if (loglevel & CPU_LOG_TB_IN_ASM) {
4598 fprintf(logfile, "----------------\n");
4599 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4600 disas(logfile, pc_start, pc_ptr - pc_start, 0, !dc->code32);
4601 fprintf(logfile, "\n");
4602 if (loglevel & CPU_LOG_TB_OP) {
4603 fprintf(logfile, "OP:\n");
4604 dump_ops(gen_opc_buf, gen_opparam_buf);
4605 fprintf(logfile, "\n");
4610 /* optimize flag computations */
4611 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
4614 if (loglevel & CPU_LOG_TB_OP_OPT) {
4615 fprintf(logfile, "AFTER FLAGS OPT:\n");
4616 dump_ops(gen_opc_buf, gen_opparam_buf);
4617 fprintf(logfile, "\n");
4621 tb->size = pc_ptr - pc_start;
/* Public entry point: translate the basic block 'tb' into micro-ops
   without recording per-op PC information (search_pc = 0). */
4625 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
4627 return gen_intermediate_code_internal(env, tb, 0);
/* Variant that also generates PC information for each intermediate
   instruction (search_pc = 1) -- presumably used to map a host fault back
   to a guest PC; confirm against the callers. */
4630 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
4632 return gen_intermediate_code_internal(env, tb, 1);