4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
35 /* XXX: move that elsewhere */
/* Write cursors for the generated micro-operation stream: opcode
   indices go through gen_opc_ptr, their parameters through
   gen_opparam_ptr. */
36 static uint16_t *gen_opc_ptr;
37 static uint32_t *gen_opparam_ptr;
/* Instruction-prefix bits accumulated while decoding one x86 insn
   (OR-ed together into DisasContext.prefix / the local 'prefixes'). */
39 #define PREFIX_REPZ 0x01
40 #define PREFIX_REPNZ 0x02
41 #define PREFIX_LOCK 0x04
42 #define PREFIX_DATA 0x08
43 #define PREFIX_ADR 0x10
/* Per-translation decoder state: everything the instruction decoder
   needs about the current insn and the current translation block.
   NOTE(review): some field declarations are elided from this listing;
   the closing "} DisasContext;" is not visible here. */
45 typedef struct DisasContext {
46 /* current insn context */
47 int override; /* -1 if no override */
50 uint8_t *pc; /* pc = eip + cs_base */
51 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
52 static state change (stop translation) */
53 /* current block context */
54 uint8_t *cs_base; /* base of CS segment */
55 int code32; /* 32 bit code segment */
56 int ss32; /* 32 bit stack segment */
57 int cc_op; /* current CC operation */
58 int addseg; /* non zero if either DS/ES/SS have a non zero base */
59 int f_st; /* currently unused */
60 int vm86; /* vm86 mode */
63 int tf; /* TF cpu flag */
64 struct TranslationBlock *tb;
65 int popl_esp_hack; /* for correct popl with esp base handling */
68 /* i386 arith/logic operations */
/* OP_SHL1 is the undocumented /6 encoding of the shift group; the
   shift dispatch tables below map slot 6 to the same handler as SHL. */
88 OP_SHL1, /* undocumented */
/* Build the INDEX_op_* enumerators from the op definitions in
   gen-op-i386.h (each DEF(name, ...) becomes INDEX_op_name). */
93 #define DEF(s, n, copy_size) INDEX_op_ ## s,
99 #include "gen-op-i386.h"
110 /* I386 int registers */
/* Operand-register indices: the 8 integer registers first, then
   pseudo registers used by the translator. */
111 OR_EAX, /* MUST be even numbered */
119 OR_TMP0, /* temporary operand register */
121 OR_A0, /* temporary register used when doing address evaluation */
122 OR_ZERO, /* fixed zero register */
/* Signatures of the generated micro-op emitter functions, taking
   zero to three immediate 'long' parameters. */
126 typedef void (GenOpFunc)(void);
127 typedef void (GenOpFunc1)(long);
128 typedef void (GenOpFunc2)(long, long);
129 typedef void (GenOpFunc3)(long, long, long);
/* Register move dispatch tables. First index is the operand size
   (byte/word/long for the [3] tables, word/long for the [2] ones),
   last index is the register number. Entries are elided in this
   listing. */
131 static GenOpFunc *gen_op_mov_reg_T0[3][8] = {
164 static GenOpFunc *gen_op_mov_reg_T1[3][8] = {
197 static GenOpFunc *gen_op_mov_reg_A0[2][8] = {
/* [size][T0-or-T1][reg]: load register into temporary TN. */
220 static GenOpFunc *gen_op_mov_TN_reg[3][2][8] =
290 static GenOpFunc *gen_op_movl_A0_reg[8] = {
/* A0 += reg << scale, indexed [scale][reg]; used for SIB index
   components and 16-bit base+index pairs (see gen_lea_modrm). */
301 static GenOpFunc *gen_op_addl_A0_reg_sN[4][8] = {
313 gen_op_addl_A0_EAX_s1,
314 gen_op_addl_A0_ECX_s1,
315 gen_op_addl_A0_EDX_s1,
316 gen_op_addl_A0_EBX_s1,
317 gen_op_addl_A0_ESP_s1,
318 gen_op_addl_A0_EBP_s1,
319 gen_op_addl_A0_ESI_s1,
320 gen_op_addl_A0_EDI_s1,
323 gen_op_addl_A0_EAX_s2,
324 gen_op_addl_A0_ECX_s2,
325 gen_op_addl_A0_EDX_s2,
326 gen_op_addl_A0_EBX_s2,
327 gen_op_addl_A0_ESP_s2,
328 gen_op_addl_A0_EBP_s2,
329 gen_op_addl_A0_ESI_s2,
330 gen_op_addl_A0_EDI_s2,
333 gen_op_addl_A0_EAX_s3,
334 gen_op_addl_A0_ECX_s3,
335 gen_op_addl_A0_EDX_s3,
336 gen_op_addl_A0_EBX_s3,
337 gen_op_addl_A0_ESP_s3,
338 gen_op_addl_A0_EBP_s3,
339 gen_op_addl_A0_ESI_s3,
340 gen_op_addl_A0_EDI_s3,
/* CMOVcc dispatch: [word/long][destination register]; the condition
   itself is resolved by the caller before choosing T1 vs T0. */
344 static GenOpFunc *gen_op_cmov_reg_T1_T0[2][8] = {
346 gen_op_cmovw_EAX_T1_T0,
347 gen_op_cmovw_ECX_T1_T0,
348 gen_op_cmovw_EDX_T1_T0,
349 gen_op_cmovw_EBX_T1_T0,
350 gen_op_cmovw_ESP_T1_T0,
351 gen_op_cmovw_EBP_T1_T0,
352 gen_op_cmovw_ESI_T1_T0,
353 gen_op_cmovw_EDI_T1_T0,
356 gen_op_cmovl_EAX_T1_T0,
357 gen_op_cmovl_ECX_T1_T0,
358 gen_op_cmovl_EDX_T1_T0,
359 gen_op_cmovl_EBX_T1_T0,
360 gen_op_cmovl_ESP_T1_T0,
361 gen_op_cmovl_EBP_T1_T0,
362 gen_op_cmovl_ESI_T1_T0,
363 gen_op_cmovl_EDI_T1_T0,
/* ALU ops indexed by OP_* (add/or/adc/sbb/and/sub/xor/cmp order of
   the x86 /r group); used by gen_op() below. */
367 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* Carry-using ops: [size][0 = adc, 1 = sbb] — matches the
   'op - OP_ADCL' indexing in gen_op(). */
378 static GenOpFunc *gen_op_arithc_T0_T1_cc[3][2] = {
380 gen_op_adcb_T0_T1_cc,
381 gen_op_sbbb_T0_T1_cc,
384 gen_op_adcw_T0_T1_cc,
385 gen_op_sbbw_T0_T1_cc,
388 gen_op_adcl_T0_T1_cc,
389 gen_op_sbbl_T0_T1_cc,
/* Map from byte-sized ALU op to its CC_OP_* lazy-flags code
   (presumably indexed by OP_* — entries elided; TODO confirm). */
393 static const int cc_op_arithb[8] = {
/* CMPXCHG by operand size (byte/word/long). */
404 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[3] = {
/* Shift/rotate group, [size][modrm /r op]: rol, ror, rcl, rcr,
   shl, shr, shl again (slot 6 = undocumented SAL alias of SHL,
   cf. OP_SHL1), sar. */
410 static GenOpFunc *gen_op_shift_T0_T1_cc[3][8] = {
412 gen_op_rolb_T0_T1_cc,
413 gen_op_rorb_T0_T1_cc,
414 gen_op_rclb_T0_T1_cc,
415 gen_op_rcrb_T0_T1_cc,
416 gen_op_shlb_T0_T1_cc,
417 gen_op_shrb_T0_T1_cc,
418 gen_op_shlb_T0_T1_cc,
419 gen_op_sarb_T0_T1_cc,
422 gen_op_rolw_T0_T1_cc,
423 gen_op_rorw_T0_T1_cc,
424 gen_op_rclw_T0_T1_cc,
425 gen_op_rcrw_T0_T1_cc,
426 gen_op_shlw_T0_T1_cc,
427 gen_op_shrw_T0_T1_cc,
428 gen_op_shlw_T0_T1_cc,
429 gen_op_sarw_T0_T1_cc,
432 gen_op_roll_T0_T1_cc,
433 gen_op_rorl_T0_T1_cc,
434 gen_op_rcll_T0_T1_cc,
435 gen_op_rcrl_T0_T1_cc,
436 gen_op_shll_T0_T1_cc,
437 gen_op_shrl_T0_T1_cc,
438 gen_op_shll_T0_T1_cc,
439 gen_op_sarl_T0_T1_cc,
/* Double-precision shifts SHLD/SHRD: [word/long][0 = shld, 1 = shrd],
   immediate-count and CL-count variants. */
443 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[2][2] = {
445 gen_op_shldw_T0_T1_im_cc,
446 gen_op_shrdw_T0_T1_im_cc,
449 gen_op_shldl_T0_T1_im_cc,
450 gen_op_shrdl_T0_T1_im_cc,
454 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[2][2] = {
456 gen_op_shldw_T0_T1_ECX_cc,
457 gen_op_shrdw_T0_T1_ECX_cc,
460 gen_op_shldl_T0_T1_ECX_cc,
461 gen_op_shrdl_T0_T1_ECX_cc,
/* Bit test group BT/BTS/BTR/BTC: [word/long][op]. */
465 static GenOpFunc *gen_op_btx_T0_T1_cc[2][4] = {
468 gen_op_btsw_T0_T1_cc,
469 gen_op_btrw_T0_T1_cc,
470 gen_op_btcw_T0_T1_cc,
474 gen_op_btsl_T0_T1_cc,
475 gen_op_btrl_T0_T1_cc,
476 gen_op_btcl_T0_T1_cc,
/* Bit scan BSF/BSR: [word/long][forward/reverse] (entries elided). */
480 static GenOpFunc *gen_op_bsx_T0_cc[2][2] = {
/* Memory access through A0, indexed by operand size
   (byte/word/long): sign-extending load, zero-extending load,
   plain load into T0 or T1, and store from T0. */
491 static GenOpFunc *gen_op_lds_T0_A0[3] = {
496 static GenOpFunc *gen_op_ldu_T0_A0[3] = {
501 /* sign does not matter */
502 static GenOpFunc *gen_op_ld_T0_A0[3] = {
508 static GenOpFunc *gen_op_ld_T1_A0[3] = {
514 static GenOpFunc *gen_op_st_T0_A0[3] = {
520 /* the _a32 and _a16 string operations use A0 as the base register. */
/* STRINGOP expands to 9 entries: 3 sizes (b/w/l) for each of the 3
   addressing variants (fast, a32, a16). */
522 #define STRINGOP(x) \
523 gen_op_ ## x ## b_fast, \
524 gen_op_ ## x ## w_fast, \
525 gen_op_ ## x ## l_fast, \
526 gen_op_ ## x ## b_a32, \
527 gen_op_ ## x ## w_a32, \
528 gen_op_ ## x ## l_a32, \
529 gen_op_ ## x ## b_a16, \
530 gen_op_ ## x ## w_a16, \
531 gen_op_ ## x ## l_a16,
/* NOTE(review): each 9-entry STRINGOP group appears twice (9*2) —
   presumably plain and REP variants — and three times (9*3) for
   scas/cmps, presumably adding REPZ and REPNZ; initializers are
   elided, so confirm against the full source. */
533 static GenOpFunc *gen_op_movs[9 * 2] = {
538 static GenOpFunc *gen_op_stos[9 * 2] = {
543 static GenOpFunc *gen_op_lods[9 * 2] = {
548 static GenOpFunc *gen_op_scas[9 * 3] = {
554 static GenOpFunc *gen_op_cmps[9 * 3] = {
560 static GenOpFunc *gen_op_ins[9 * 2] = {
566 static GenOpFunc *gen_op_outs[9 * 2] = {
/* Emit a DS-relative string operation of size 'ot': computes the
   source address into A0 (honouring a segment override and the
   16/32-bit address size) before invoking the handler from 'func'.
   NOTE(review): most of the body is elided in this listing. */
572 static inline void gen_string_ds(DisasContext *s, int ot, GenOpFunc **func)
576 override = s->override;
579 if (s->addseg && override < 0)
582 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
590 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
591 /* 16 address, always override */
/* ES-relative counterpart (ES is not overridable for string ops);
   body elided in this listing. */
597 static inline void gen_string_es(DisasContext *s, int ot, GenOpFunc **func)
/* IN/OUT by operand size. */
614 static GenOpFunc *gen_op_in[3] = {
620 static GenOpFunc *gen_op_out[3] = {
/* Conditional-jump emitters for the optimized cmp/sub case:
   [operand size][jcc condition] — cf. the
   gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op] indexing in gen_jcc(). */
637 static GenOpFunc3 *gen_jcc_sub[3][8] = {
/* LOOP/LOOPZ/LOOPNZ/JCXZ family: [address size][variant]
   (entries elided). */
669 static GenOpFunc2 *gen_op_loop[2][4] = {
/* Generic SETcc fallback computed from full EFLAGS, indexed by
   condition. */
684 static GenOpFunc *gen_setcc_slow[8] = {
/* SETcc emitters for the optimized cmp/sub case:
   [operand size][condition]. */
695 static GenOpFunc *gen_setcc_sub[3][8] = {
700 gen_op_setbe_T0_subb,
704 gen_op_setle_T0_subb,
710 gen_op_setbe_T0_subw,
714 gen_op_setle_T0_subw,
720 gen_op_setbe_T0_subl,
724 gen_op_setle_T0_subl,
/* FPU arithmetic on ST0 with FT0 / STN, indexed by the /r field. */
728 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
734 gen_op_fsubr_ST0_FT0,
736 gen_op_fdivr_ST0_FT0,
739 /* NOTE the exception in "r" op ordering */
740 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
745 gen_op_fsubr_STN_ST0,
747 gen_op_fdivr_STN_ST0,
751 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit micro-ops for ALU operation 'op' of size 'ot' with T1 as the
   source and 'd' as the destination (register index, or memory at A0
   when d == OR_TMP0). Updates s1->cc_op for lazy flags evaluation.
   NOTE(review): the surrounding switch/if structure is elided in this
   listing; only selected body lines are visible. */
752 static void gen_op(DisasContext *s1, int op, int ot, int d)
754 GenOpFunc *gen_update_cc;
757 gen_op_mov_TN_reg[ot][0][d]();
759 gen_op_ld_T0_A0[ot]();
/* adc/sbb consume the current carry: flush the known cc_op first,
   then flags become dynamic. */
764 if (s1->cc_op != CC_OP_DYNAMIC)
765 gen_op_set_cc_op(s1->cc_op);
766 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
767 s1->cc_op = CC_OP_DYNAMIC;
768 /* XXX: incorrect: CC_OP must also be modified AFTER memory access */
769 gen_update_cc = gen_op_update2_cc;
773 s1->cc_op = CC_OP_ADDB + ot;
774 gen_update_cc = gen_op_update2_cc;
778 s1->cc_op = CC_OP_SUBB + ot;
779 gen_update_cc = gen_op_update2_cc;
785 gen_op_arith_T0_T1_cc[op]();
786 s1->cc_op = CC_OP_LOGICB + ot;
787 gen_update_cc = gen_op_update1_cc;
/* cmp only sets flags — no destination writeback (NULL updater). */
790 gen_op_cmpl_T0_T1_cc();
791 s1->cc_op = CC_OP_SUBB + ot;
792 gen_update_cc = NULL;
797 gen_op_mov_reg_T0[ot][d]();
799 gen_op_st_T0_A0[ot]();
801 /* the flags update must happen after the memory write (precise
802 exception support) */
807 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c < 0) of size 'ot' on destination 'd';
   CF is preserved, so the previous cc_op is flushed first.
   NOTE(review): some control-flow lines are elided in this listing. */
808 static void gen_inc(DisasContext *s1, int ot, int d, int c)
811 gen_op_mov_TN_reg[ot][0][d]();
813 gen_op_ld_T0_A0[ot]();
814 if (s1->cc_op != CC_OP_DYNAMIC)
815 gen_op_set_cc_op(s1->cc_op);
818 s1->cc_op = CC_OP_INCB + ot;
821 s1->cc_op = CC_OP_DECB + ot;
824 gen_op_mov_reg_T0[ot][d]();
826 gen_op_st_T0_A0[ot]();
827 gen_op_update_inc_cc();
/* Emit a shift/rotate 'op' of size 'ot' on destination 'd' with the
   count taken from register 's'. Since a zero count leaves flags
   untouched, the result flags cannot be predicted at translate time:
   cc_op becomes dynamic. NOTE(review): memory-operand branches are
   elided in this listing. */
830 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
833 gen_op_mov_TN_reg[ot][0][d]();
835 gen_op_mov_TN_reg[ot][1][s]();
836 /* for zero counts, flags are not updated, so must do it dynamically */
837 if (s1->cc_op != CC_OP_DYNAMIC)
838 gen_op_set_cc_op(s1->cc_op);
840 gen_op_shift_T0_T1_cc[ot][op]();
843 gen_op_mov_reg_T0[ot][d]();
844 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Immediate-count shift: load the constant into T1 and reuse the
   register-count path. */
847 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
849 /* currently not optimized */
850 gen_op_movl_T1_im(c);
851 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the addressing part of a ModR/M byte (plus optional SIB byte
   and displacement) and emit micro-ops leaving the effective address
   in A0, with the appropriate segment base added. Results are also
   reported through reg_ptr/offset_ptr. Handles both 32-bit (ModR/M +
   SIB) and 16-bit (fixed base/index pairs) address forms.
   NOTE(review): many control-flow lines (switch/case, braces) are
   elided in this listing. */
854 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
861 int mod, rm, code, override, must_add_seg;
863 override = s->override;
864 must_add_seg = s->addseg;
867 mod = (modrm >> 6) & 3;
/* 32-bit form: fetch and split the SIB byte. */
879 code = ldub(s->pc++);
880 scale = (code >> 6) & 3;
881 index = (code >> 3) & 7;
896 disp = (int8_t)ldub(s->pc++);
906 /* for correct popl handling with esp */
907 if (base == 4 && s->popl_esp_hack)
909 gen_op_movl_A0_reg[base]();
911 gen_op_addl_A0_im(disp);
913 gen_op_movl_A0_im(disp);
915 /* XXX: index == 4 is always invalid */
916 if (havesib && (index != 4 || scale != 0)) {
917 gen_op_addl_A0_reg_sN[scale][index]();
/* EBP/ESP-based addresses default to the SS segment. */
921 if (base == R_EBP || base == R_ESP)
926 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
934 gen_op_movl_A0_im(disp);
935 rm = 0; /* avoid SS override */
/* 16-bit form below: displacement then the fixed base/index pairs
   selected by rm (BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP, BX). */
942 disp = (int8_t)ldub(s->pc++);
952 gen_op_movl_A0_reg[R_EBX]();
953 gen_op_addl_A0_reg_sN[0][R_ESI]();
956 gen_op_movl_A0_reg[R_EBX]();
957 gen_op_addl_A0_reg_sN[0][R_EDI]();
960 gen_op_movl_A0_reg[R_EBP]();
961 gen_op_addl_A0_reg_sN[0][R_ESI]();
964 gen_op_movl_A0_reg[R_EBP]();
965 gen_op_addl_A0_reg_sN[0][R_EDI]();
968 gen_op_movl_A0_reg[R_ESI]();
971 gen_op_movl_A0_reg[R_EDI]();
974 gen_op_movl_A0_reg[R_EBP]();
978 gen_op_movl_A0_reg[R_EBX]();
/* 16-bit addresses wrap at 64K: mask A0 to 16 bits. */
982 gen_op_addl_A0_im(disp);
983 gen_op_andl_A0_ffff();
/* rm 2 (BP+SI), 3 (BP+DI) and 6 (BP) default to SS. */
987 if (rm == 2 || rm == 3 || rm == 6)
992 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1002 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* mod == 3 means a register operand (direct register move); any other
   mod means memory: compute the address with gen_lea_modrm and
   load/store through A0. */
1004 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1006 int mod, rm, opreg, disp;
1008 mod = (modrm >> 6) & 3;
1013 gen_op_mov_TN_reg[ot][0][reg]();
1014 gen_op_mov_reg_T0[ot][rm]();
1016 gen_op_mov_TN_reg[ot][0][rm]();
1018 gen_op_mov_reg_T0[ot][reg]();
1021 gen_lea_modrm(s, modrm, &opreg, &disp);
1024 gen_op_mov_TN_reg[ot][0][reg]();
1025 gen_op_st_T0_A0[ot]();
1027 gen_op_ld_T0_A0[ot]();
1029 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size 'ot' from the instruction stream,
   advancing s->pc (body elided in this listing). */
1034 static inline uint32_t insn_get(DisasContext *s, int ot)
/* Emit a conditional jump for condition 'b': taken target is 'val',
   fall-through is 'next_eip'. When the last flag-setting op was a
   sub/cmp (or compatible add), dispatch directly through gen_jcc_sub;
   otherwise fall back to the slow full-EFLAGS setcc path. The low bit
   of 'b' inverts the condition (swapped target arguments below).
   NOTE(review): several branches are elided in this listing. */
1056 static inline void gen_jcc(DisasContext *s, int b, int val, int next_eip)
1058 TranslationBlock *tb;
1063 jcc_op = (b >> 1) & 7;
1065 /* we optimize the cmp/jcc case */
1069 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1072 /* some jumps are easy to compute */
1099 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1102 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1114 if (s->cc_op != CC_OP_DYNAMIC)
1115 gen_op_set_cc_op(s->cc_op);
1118 gen_setcc_slow[jcc_op]();
1124 func((long)tb, val, next_eip);
1126 func((long)tb, next_eip, val);
/* Emit SETcc for condition 'b' into T0, using the same optimized
   sub/cmp dispatch as gen_jcc, with gen_setcc_slow as fallback.
   NOTE(review): several branches are elided in this listing. */
1131 static void gen_setcc(DisasContext *s, int b)
1137 jcc_op = (b >> 1) & 7;
1139 /* we optimize the cmp/jcc case */
1143 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1148 /* some jumps are easy to compute */
1166 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1169 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 3][jcc_op];
1177 if (s->cc_op != CC_OP_DYNAMIC)
1178 gen_op_set_cc_op(s->cc_op);
1179 func = gen_setcc_slow[jcc_op];
1188 /* move T0 to seg_reg and compute if the CPU state may change */
/* Protected-mode loads go through the checked gen_op_movl_seg_T0
   (which can fault, hence cur_eip); vm86/real-mode writes the selector
   directly. Loading DS/ES/SS when addseg is off may invalidate the
   zero-base assumption baked into this TB, so translation stops. */
1189 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, unsigned int cur_eip)
1192 gen_op_movl_seg_T0(seg_reg, cur_eip);
1194 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1195 if (!s->addseg && seg_reg < R_FS)
1196 s->is_jmp = 2; /* abort translation because the register may
1197 have a non zero base */
1200 /* generate a push. It depends on ss32, addseg and dflag */
/* Selects among the four push helpers: {ss32, ss16} x {long, word}.
   NOTE(review): the selecting if/else lines are elided in this
   listing. */
1201 static void gen_push_T0(DisasContext *s)
1211 gen_op_pushl_ss32_T0();
1213 gen_op_pushw_ss32_T0();
1217 gen_op_pushl_ss16_T0();
1219 gen_op_pushw_ss16_T0();
1223 /* two step pop is necessary for precise exceptions */
/* Loads the stack top into T0 without adjusting ESP; the caller
   updates ESP afterwards via gen_pop_update (so a fault leaves ESP
   intact). Selection among {ss32, ss16} x {long, word} variants. */
1224 static void gen_pop_T0(DisasContext *s)
1234 gen_op_popl_ss32_T0();
1236 gen_op_popw_ss32_T0();
1240 gen_op_popl_ss16_T0();
1242 gen_op_popw_ss16_T0();
/* Add 'addend' to the stack pointer (ESP in ss32 mode, SP otherwise),
   with dedicated fast ops for the common +2 and +4 cases. */
1246 static inline void gen_stack_update(DisasContext *s, int addend)
1250 gen_op_addl_ESP_2();
1251 else if (addend == 4)
1252 gen_op_addl_ESP_4();
1254 gen_op_addl_ESP_im(addend);
1257 gen_op_addw_ESP_2();
1258 else if (addend == 4)
1259 gen_op_addw_ESP_4();
1261 gen_op_addw_ESP_im(addend);
/* Second half of the two-step pop: bump the stack pointer by the
   operand size (2 << dflag = 2 or 4 bytes). */
1265 static void gen_pop_update(DisasContext *s)
1267 gen_stack_update(s, 2 << s->dflag);
/* Compute the current stack-top address into A0 (masked to 16 bits
   for ss16, SS base added when needed) and keep the unsegmented
   offset in T1 for a later ESP writeback.
   NOTE(review): the guarding conditionals are elided in this listing. */
1270 static void gen_stack_A0(DisasContext *s)
1272 gen_op_movl_A0_ESP();
1274 gen_op_andl_A0_ffff();
1275 gen_op_movl_T1_A0();
1277 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1280 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: pre-decrement A0 by 8 slots, store EDI..EAX (index
   7-i walks registers in reverse), then write the new stack pointer
   kept in T1. */
1281 static void gen_pusha(DisasContext *s)
1284 gen_op_movl_A0_ESP();
1285 gen_op_addl_A0_im(-16 << s->dflag);
1287 gen_op_andl_A0_ffff();
1288 gen_op_movl_T1_A0();
1290 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1291 for(i = 0;i < 8; i++) {
1292 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
1293 gen_op_st_T0_A0[OT_WORD + s->dflag]();
1294 gen_op_addl_A0_im(2 << s->dflag);
1296 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
1299 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: load EDI..EAX from the stack (skipping the saved ESP
   slot) and finally set the stack pointer from T1, which holds the
   original offset advanced by 8 slots. */
1300 static void gen_popa(DisasContext *s)
1303 gen_op_movl_A0_ESP();
1305 gen_op_andl_A0_ffff();
1306 gen_op_movl_T1_A0();
1307 gen_op_addl_T1_im(16 << s->dflag);
1309 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
1310 for(i = 0;i < 8; i++) {
1311 /* ESP is not reloaded */
1313 gen_op_ld_T0_A0[OT_WORD + s->dflag]();
1314 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
1316 gen_op_addl_A0_im(2 << s->dflag);
1318 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
1321 /* NOTE: wrap around in 16 bit not fully handled */
1322 /* XXX: check this */
/* ENTER esp_addend, level: push EBP, copy 'level' outer frame
   pointers, set EBP to the new frame and reserve esp_addend bytes of
   locals. NOTE(review): the level-loop lines are elided in this
   listing. */
1323 static void gen_enter(DisasContext *s, int esp_addend, int level)
1325 int ot, level1, addend, opsize;
1327 ot = s->dflag + OT_WORD;
1330 opsize = 2 << s->dflag;
1332 gen_op_movl_A0_ESP();
1333 gen_op_addl_A0_im(-opsize);
1335 gen_op_andl_A0_ffff();
1336 gen_op_movl_T1_A0();
1338 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* push current EBP */
1340 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
1341 gen_op_st_T0_A0[ot]();
1344 gen_op_addl_A0_im(-opsize);
1345 gen_op_addl_T0_im(-opsize);
1346 gen_op_st_T0_A0[ot]();
1348 gen_op_addl_A0_im(-opsize);
1349 /* XXX: add st_T1_A0 ? */
1350 gen_op_movl_T0_T1();
1351 gen_op_st_T0_A0[ot]();
/* new frame pointer, then drop ESP past locals and saved pointers */
1353 gen_op_mov_reg_T1[ot][R_EBP]();
1354 addend = -esp_addend;
1356 addend -= opsize * (level1 + 1);
1357 gen_op_addl_T1_im(addend);
1358 gen_op_mov_reg_T1[ot][R_ESP]();
/* Raise exception 'trapno' at cur_eip: flush the lazy condition
   codes, point EIP at the faulting insn, then raise. */
1361 static void gen_exception(DisasContext *s, int trapno, unsigned int cur_eip)
1363 if (s->cc_op != CC_OP_DYNAMIC)
1364 gen_op_set_cc_op(s->cc_op);
1365 gen_op_jmp_im(cur_eip);
1366 gen_op_raise_exception(trapno);
1370 /* an interrupt is different from an exception because of the
1371 privilege checks */
/* Raise software interrupt 'intno': flush lazy flags, set EIP to the
   INT instruction, and pass next_eip as the return address to push. */
1372 static void gen_interrupt(DisasContext *s, int intno,
1373 unsigned int cur_eip, unsigned int next_eip)
1375 if (s->cc_op != CC_OP_DYNAMIC)
1376 gen_op_set_cc_op(s->cc_op);
1377 gen_op_jmp_im(cur_eip);
1378 gen_op_raise_interrupt(intno, next_eip);
1382 /* generate a jump to eip. No segment change must happen before as a
1383 direct call to the next block may occur */
/* Direct jump: flush lazy flags, then chain to the next translation
   block via gen_op_jmp_tb_next. */
1384 static void gen_jmp(DisasContext *s, unsigned int eip)
1386 TranslationBlock *tb = s->tb;
1388 if (s->cc_op != CC_OP_DYNAMIC)
1389 gen_op_set_cc_op(s->cc_op);
1390 gen_op_jmp_tb_next((long)tb, eip);
1394 /* return the next pc address. Return -1 if no insn found. *is_jmp_ptr
1395 is set to true if the instruction sets the PC (last instruction of
1397 long disas_insn(DisasContext *s, uint8_t *pc_start)
1399 int b, prefixes, aflag, dflag;
1401 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
1402 unsigned int next_eip;
1412 /* check prefixes */
1415 prefixes |= PREFIX_REPZ;
1418 prefixes |= PREFIX_REPNZ;
1421 prefixes |= PREFIX_LOCK;
1442 prefixes |= PREFIX_DATA;
1445 prefixes |= PREFIX_ADR;
1449 if (prefixes & PREFIX_DATA)
1451 if (prefixes & PREFIX_ADR)
1454 s->prefix = prefixes;
1458 /* lock generation */
1459 if (prefixes & PREFIX_LOCK)
1462 /* now check op code */
1466 /**************************/
1467 /* extended op code */
1468 b = ldub(s->pc++) | 0x100;
1471 /**************************/
1489 ot = dflag ? OT_LONG : OT_WORD;
1492 case 0: /* OP Ev, Gv */
1493 modrm = ldub(s->pc++);
1494 reg = ((modrm >> 3) & 7) + OR_EAX;
1495 mod = (modrm >> 6) & 3;
1498 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1501 opreg = OR_EAX + rm;
1503 gen_op_mov_TN_reg[ot][1][reg]();
1504 gen_op(s, op, ot, opreg);
1506 case 1: /* OP Gv, Ev */
1507 modrm = ldub(s->pc++);
1508 mod = (modrm >> 6) & 3;
1509 reg = ((modrm >> 3) & 7) + OR_EAX;
1512 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1513 gen_op_ld_T1_A0[ot]();
1515 gen_op_mov_TN_reg[ot][1][rm]();
1517 gen_op(s, op, ot, reg);
1519 case 2: /* OP A, Iv */
1520 val = insn_get(s, ot);
1521 gen_op_movl_T1_im(val);
1522 gen_op(s, op, ot, OR_EAX);
1528 case 0x80: /* GRP1 */
1537 ot = dflag ? OT_LONG : OT_WORD;
1539 modrm = ldub(s->pc++);
1540 mod = (modrm >> 6) & 3;
1542 op = (modrm >> 3) & 7;
1545 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1548 opreg = rm + OR_EAX;
1555 val = insn_get(s, ot);
1558 val = (int8_t)insn_get(s, OT_BYTE);
1561 gen_op_movl_T1_im(val);
1562 gen_op(s, op, ot, opreg);
1566 /**************************/
1567 /* inc, dec, and other misc arith */
1568 case 0x40 ... 0x47: /* inc Gv */
1569 ot = dflag ? OT_LONG : OT_WORD;
1570 gen_inc(s, ot, OR_EAX + (b & 7), 1);
1572 case 0x48 ... 0x4f: /* dec Gv */
1573 ot = dflag ? OT_LONG : OT_WORD;
1574 gen_inc(s, ot, OR_EAX + (b & 7), -1);
1576 case 0xf6: /* GRP3 */
1581 ot = dflag ? OT_LONG : OT_WORD;
1583 modrm = ldub(s->pc++);
1584 mod = (modrm >> 6) & 3;
1586 op = (modrm >> 3) & 7;
1588 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1589 gen_op_ld_T0_A0[ot]();
1591 gen_op_mov_TN_reg[ot][0][rm]();
1596 val = insn_get(s, ot);
1597 gen_op_movl_T1_im(val);
1598 gen_op_testl_T0_T1_cc();
1599 s->cc_op = CC_OP_LOGICB + ot;
1604 gen_op_st_T0_A0[ot]();
1606 gen_op_mov_reg_T0[ot][rm]();
1612 gen_op_st_T0_A0[ot]();
1614 gen_op_mov_reg_T0[ot][rm]();
1616 gen_op_update_neg_cc();
1617 s->cc_op = CC_OP_SUBB + ot;
1622 gen_op_mulb_AL_T0();
1625 gen_op_mulw_AX_T0();
1629 gen_op_mull_EAX_T0();
1632 s->cc_op = CC_OP_MUL;
1637 gen_op_imulb_AL_T0();
1640 gen_op_imulw_AX_T0();
1644 gen_op_imull_EAX_T0();
1647 s->cc_op = CC_OP_MUL;
1652 gen_op_divb_AL_T0(pc_start - s->cs_base);
1655 gen_op_divw_AX_T0(pc_start - s->cs_base);
1659 gen_op_divl_EAX_T0(pc_start - s->cs_base);
1666 gen_op_idivb_AL_T0(pc_start - s->cs_base);
1669 gen_op_idivw_AX_T0(pc_start - s->cs_base);
1673 gen_op_idivl_EAX_T0(pc_start - s->cs_base);
1682 case 0xfe: /* GRP4 */
1683 case 0xff: /* GRP5 */
1687 ot = dflag ? OT_LONG : OT_WORD;
1689 modrm = ldub(s->pc++);
1690 mod = (modrm >> 6) & 3;
1692 op = (modrm >> 3) & 7;
1693 if (op >= 2 && b == 0xfe) {
1697 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1698 if (op >= 2 && op != 3 && op != 5)
1699 gen_op_ld_T0_A0[ot]();
1701 gen_op_mov_TN_reg[ot][0][rm]();
1705 case 0: /* inc Ev */
1710 gen_inc(s, ot, opreg, 1);
1712 case 1: /* dec Ev */
1717 gen_inc(s, ot, opreg, -1);
1719 case 2: /* call Ev */
1720 /* XXX: optimize if memory (no and is necessary) */
1722 gen_op_andl_T0_ffff();
1724 next_eip = s->pc - s->cs_base;
1725 gen_op_movl_T0_im(next_eip);
1729 case 3: /* lcall Ev */
1730 /* push return segment + offset */
1731 gen_op_movl_T0_seg(R_CS);
1733 next_eip = s->pc - s->cs_base;
1734 gen_op_movl_T0_im(next_eip);
1737 gen_op_ld_T1_A0[ot]();
1738 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
1739 gen_op_lduw_T0_A0();
1740 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
1741 gen_op_movl_T0_T1();
1745 case 4: /* jmp Ev */
1747 gen_op_andl_T0_ffff();
1751 case 5: /* ljmp Ev */
1752 gen_op_ld_T1_A0[ot]();
1753 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
1754 gen_op_lduw_T0_A0();
1756 /* we compute EIP to handle the exception case */
1757 gen_op_jmp_im(pc_start - s->cs_base);
1758 gen_op_ljmp_T0_T1();
1760 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
1761 gen_op_movl_T0_T1();
1766 case 6: /* push Ev */
1774 case 0x84: /* test Ev, Gv */
1779 ot = dflag ? OT_LONG : OT_WORD;
1781 modrm = ldub(s->pc++);
1782 mod = (modrm >> 6) & 3;
1784 reg = (modrm >> 3) & 7;
1786 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
1787 gen_op_mov_TN_reg[ot][1][reg + OR_EAX]();
1788 gen_op_testl_T0_T1_cc();
1789 s->cc_op = CC_OP_LOGICB + ot;
1792 case 0xa8: /* test eAX, Iv */
1797 ot = dflag ? OT_LONG : OT_WORD;
1798 val = insn_get(s, ot);
1800 gen_op_mov_TN_reg[ot][0][OR_EAX]();
1801 gen_op_movl_T1_im(val);
1802 gen_op_testl_T0_T1_cc();
1803 s->cc_op = CC_OP_LOGICB + ot;
1806 case 0x98: /* CWDE/CBW */
1808 gen_op_movswl_EAX_AX();
1810 gen_op_movsbw_AX_AL();
1812 case 0x99: /* CDQ/CWD */
1814 gen_op_movslq_EDX_EAX();
1816 gen_op_movswl_DX_AX();
1818 case 0x1af: /* imul Gv, Ev */
1819 case 0x69: /* imul Gv, Ev, I */
1821 ot = dflag ? OT_LONG : OT_WORD;
1822 modrm = ldub(s->pc++);
1823 reg = ((modrm >> 3) & 7) + OR_EAX;
1824 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
1826 val = insn_get(s, ot);
1827 gen_op_movl_T1_im(val);
1828 } else if (b == 0x6b) {
1829 val = insn_get(s, OT_BYTE);
1830 gen_op_movl_T1_im(val);
1832 gen_op_mov_TN_reg[ot][1][reg]();
1835 if (ot == OT_LONG) {
1836 gen_op_imull_T0_T1();
1838 gen_op_imulw_T0_T1();
1840 gen_op_mov_reg_T0[ot][reg]();
1841 s->cc_op = CC_OP_MUL;
1844 case 0x1c1: /* xadd Ev, Gv */
1848 ot = dflag ? OT_LONG : OT_WORD;
1849 modrm = ldub(s->pc++);
1850 reg = (modrm >> 3) & 7;
1851 mod = (modrm >> 6) & 3;
1854 gen_op_mov_TN_reg[ot][0][reg]();
1855 gen_op_mov_TN_reg[ot][1][rm]();
1856 gen_op_addl_T0_T1();
1857 gen_op_mov_reg_T0[ot][rm]();
1858 gen_op_mov_reg_T1[ot][reg]();
1860 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1861 gen_op_mov_TN_reg[ot][0][reg]();
1862 gen_op_ld_T1_A0[ot]();
1863 gen_op_addl_T0_T1();
1864 gen_op_st_T0_A0[ot]();
1865 gen_op_mov_reg_T1[ot][reg]();
1867 gen_op_update2_cc();
1868 s->cc_op = CC_OP_ADDB + ot;
1871 case 0x1b1: /* cmpxchg Ev, Gv */
1875 ot = dflag ? OT_LONG : OT_WORD;
1876 modrm = ldub(s->pc++);
1877 reg = (modrm >> 3) & 7;
1878 mod = (modrm >> 6) & 3;
1879 gen_op_mov_TN_reg[ot][1][reg]();
1882 gen_op_mov_TN_reg[ot][0][rm]();
1883 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
1884 gen_op_mov_reg_T0[ot][rm]();
1886 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1887 gen_op_ld_T0_A0[ot]();
1888 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
1889 gen_op_st_T0_A0[ot]();
1891 s->cc_op = CC_OP_SUBB + ot;
1893 case 0x1c7: /* cmpxchg8b */
1894 modrm = ldub(s->pc++);
1895 mod = (modrm >> 6) & 3;
1898 if (s->cc_op != CC_OP_DYNAMIC)
1899 gen_op_set_cc_op(s->cc_op);
1900 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
1902 s->cc_op = CC_OP_EFLAGS;
1905 /**************************/
1907 case 0x50 ... 0x57: /* push */
1908 gen_op_mov_TN_reg[OT_LONG][0][b & 7]();
1911 case 0x58 ... 0x5f: /* pop */
1912 ot = dflag ? OT_LONG : OT_WORD;
1914 gen_op_mov_reg_T0[ot][b & 7]();
1917 case 0x60: /* pusha */
1920 case 0x61: /* popa */
1923 case 0x68: /* push Iv */
1925 ot = dflag ? OT_LONG : OT_WORD;
1927 val = insn_get(s, ot);
1929 val = (int8_t)insn_get(s, OT_BYTE);
1930 gen_op_movl_T0_im(val);
1933 case 0x8f: /* pop Ev */
1934 ot = dflag ? OT_LONG : OT_WORD;
1935 modrm = ldub(s->pc++);
1937 s->popl_esp_hack = 1;
1938 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
1939 s->popl_esp_hack = 0;
1942 case 0xc8: /* enter */
1947 level = ldub(s->pc++);
1948 gen_enter(s, val, level);
1951 case 0xc9: /* leave */
1952 /* XXX: exception not precise (ESP is updated before potential exception) */
1954 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
1955 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
1957 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
1958 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
1961 ot = dflag ? OT_LONG : OT_WORD;
1962 gen_op_mov_reg_T0[ot][R_EBP]();
1965 case 0x06: /* push es */
1966 case 0x0e: /* push cs */
1967 case 0x16: /* push ss */
1968 case 0x1e: /* push ds */
1969 gen_op_movl_T0_seg(b >> 3);
1972 case 0x1a0: /* push fs */
1973 case 0x1a8: /* push gs */
1974 gen_op_movl_T0_seg((b >> 3) & 7);
1977 case 0x07: /* pop es */
1978 case 0x17: /* pop ss */
1979 case 0x1f: /* pop ds */
1981 gen_movl_seg_T0(s, b >> 3, pc_start - s->cs_base);
1984 case 0x1a1: /* pop fs */
1985 case 0x1a9: /* pop gs */
1987 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
1991 /**************************/
1994 case 0x89: /* mov Gv, Ev */
1998 ot = dflag ? OT_LONG : OT_WORD;
1999 modrm = ldub(s->pc++);
2000 reg = (modrm >> 3) & 7;
2002 /* generate a generic store */
2003 gen_ldst_modrm(s, modrm, ot, OR_EAX + reg, 1);
2006 case 0xc7: /* mov Ev, Iv */
2010 ot = dflag ? OT_LONG : OT_WORD;
2011 modrm = ldub(s->pc++);
2012 mod = (modrm >> 6) & 3;
2014 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2015 val = insn_get(s, ot);
2016 gen_op_movl_T0_im(val);
2018 gen_op_st_T0_A0[ot]();
2020 gen_op_mov_reg_T0[ot][modrm & 7]();
2023 case 0x8b: /* mov Ev, Gv */
2027 ot = dflag ? OT_LONG : OT_WORD;
2028 modrm = ldub(s->pc++);
2029 reg = (modrm >> 3) & 7;
2031 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2032 gen_op_mov_reg_T0[ot][reg]();
2034 case 0x8e: /* mov seg, Gv */
2035 ot = dflag ? OT_LONG : OT_WORD;
2036 modrm = ldub(s->pc++);
2037 reg = (modrm >> 3) & 7;
2038 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2039 if (reg >= 6 || reg == R_CS)
2041 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2043 case 0x8c: /* mov Gv, seg */
2044 ot = dflag ? OT_LONG : OT_WORD;
2045 modrm = ldub(s->pc++);
2046 reg = (modrm >> 3) & 7;
2049 gen_op_movl_T0_seg(reg);
2050 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2053 case 0x1b6: /* movzbS Gv, Eb */
2054 case 0x1b7: /* movzwS Gv, Eb */
2055 case 0x1be: /* movsbS Gv, Eb */
2056 case 0x1bf: /* movswS Gv, Eb */
2059 /* d_ot is the size of destination */
2060 d_ot = dflag + OT_WORD;
2061 /* ot is the size of source */
2062 ot = (b & 1) + OT_BYTE;
2063 modrm = ldub(s->pc++);
2064 reg = ((modrm >> 3) & 7) + OR_EAX;
2065 mod = (modrm >> 6) & 3;
2069 gen_op_mov_TN_reg[ot][0][rm]();
2070 switch(ot | (b & 8)) {
2072 gen_op_movzbl_T0_T0();
2075 gen_op_movsbl_T0_T0();
2078 gen_op_movzwl_T0_T0();
2082 gen_op_movswl_T0_T0();
2085 gen_op_mov_reg_T0[d_ot][reg]();
2087 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2089 gen_op_lds_T0_A0[ot]();
2091 gen_op_ldu_T0_A0[ot]();
2093 gen_op_mov_reg_T0[d_ot][reg]();
2098 case 0x8d: /* lea */
2099 ot = dflag ? OT_LONG : OT_WORD;
2100 modrm = ldub(s->pc++);
2101 reg = (modrm >> 3) & 7;
2102 /* we must ensure that no segment is added */
2106 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2108 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
2111 case 0xa0: /* mov EAX, Ov */
2113 case 0xa2: /* mov Ov, EAX */
2118 ot = dflag ? OT_LONG : OT_WORD;
2120 offset_addr = insn_get(s, OT_LONG);
2122 offset_addr = insn_get(s, OT_WORD);
2123 gen_op_movl_A0_im(offset_addr);
2124 /* handle override */
2126 int override, must_add_seg;
2127 must_add_seg = s->addseg;
2128 if (s->override >= 0) {
2129 override = s->override;
2135 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2139 gen_op_ld_T0_A0[ot]();
2140 gen_op_mov_reg_T0[ot][R_EAX]();
2142 gen_op_mov_TN_reg[ot][0][R_EAX]();
2143 gen_op_st_T0_A0[ot]();
2146 case 0xd7: /* xlat */
2147 gen_op_movl_A0_reg[R_EBX]();
2148 gen_op_addl_A0_AL();
2150 gen_op_andl_A0_ffff();
2151 /* handle override */
2153 int override, must_add_seg;
2154 must_add_seg = s->addseg;
2156 if (s->override >= 0) {
2157 override = s->override;
2163 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
2166 gen_op_ldub_T0_A0();
2167 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
2169 case 0xb0 ... 0xb7: /* mov R, Ib */
2170 val = insn_get(s, OT_BYTE);
2171 gen_op_movl_T0_im(val);
2172 gen_op_mov_reg_T0[OT_BYTE][b & 7]();
2174 case 0xb8 ... 0xbf: /* mov R, Iv */
2175 ot = dflag ? OT_LONG : OT_WORD;
2176 val = insn_get(s, ot);
2177 reg = OR_EAX + (b & 7);
2178 gen_op_movl_T0_im(val);
2179 gen_op_mov_reg_T0[ot][reg]();
2182 case 0x91 ... 0x97: /* xchg R, EAX */
2183 ot = dflag ? OT_LONG : OT_WORD;
2188 case 0x87: /* xchg Ev, Gv */
2192 ot = dflag ? OT_LONG : OT_WORD;
2193 modrm = ldub(s->pc++);
2194 reg = (modrm >> 3) & 7;
2195 mod = (modrm >> 6) & 3;
2199 gen_op_mov_TN_reg[ot][0][reg]();
2200 gen_op_mov_TN_reg[ot][1][rm]();
2201 gen_op_mov_reg_T0[ot][rm]();
2202 gen_op_mov_reg_T1[ot][reg]();
2204 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2205 gen_op_mov_TN_reg[ot][0][reg]();
2206 /* for xchg, lock is implicit */
2207 if (!(prefixes & PREFIX_LOCK))
2209 gen_op_ld_T1_A0[ot]();
2210 gen_op_st_T0_A0[ot]();
2211 if (!(prefixes & PREFIX_LOCK))
2213 gen_op_mov_reg_T1[ot][reg]();
2216 case 0xc4: /* les Gv */
2219 case 0xc5: /* lds Gv */
2222 case 0x1b2: /* lss Gv */
2225 case 0x1b4: /* lfs Gv */
2228 case 0x1b5: /* lgs Gv */
2231 ot = dflag ? OT_LONG : OT_WORD;
2232 modrm = ldub(s->pc++);
2233 reg = (modrm >> 3) & 7;
2234 mod = (modrm >> 6) & 3;
2237 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2238 gen_op_ld_T1_A0[ot]();
2239 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2240 /* load the segment first to handle exceptions properly */
2241 gen_op_lduw_T0_A0();
2242 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
2243 /* then put the data */
2244 gen_op_mov_reg_T1[ot][reg]();
2247 /************************/
2258 ot = dflag ? OT_LONG : OT_WORD;
2260 modrm = ldub(s->pc++);
2261 mod = (modrm >> 6) & 3;
2263 op = (modrm >> 3) & 7;
2266 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2267 gen_op_ld_T0_A0[ot]();
2270 opreg = rm + OR_EAX;
2275 gen_shift(s, op, ot, opreg, OR_ECX);
2278 shift = ldub(s->pc++);
2280 gen_shifti(s, op, ot, opreg, shift);
2284 gen_op_st_T0_A0[ot]();
2299 case 0x1a4: /* shld imm */
2303 case 0x1a5: /* shld cl */
2307 case 0x1ac: /* shrd imm */
2311 case 0x1ad: /* shrd cl */
2315 ot = dflag ? OT_LONG : OT_WORD;
2316 modrm = ldub(s->pc++);
2317 mod = (modrm >> 6) & 3;
2319 reg = (modrm >> 3) & 7;
2322 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2323 gen_op_ld_T0_A0[ot]();
2325 gen_op_mov_TN_reg[ot][0][rm]();
2327 gen_op_mov_TN_reg[ot][1][reg]();
2330 val = ldub(s->pc++);
2333 gen_op_shiftd_T0_T1_im_cc[ot - OT_WORD][op](val);
2334 if (op == 0 && ot != OT_WORD)
2335 s->cc_op = CC_OP_SHLB + ot;
2337 s->cc_op = CC_OP_SARB + ot;
2340 if (s->cc_op != CC_OP_DYNAMIC)
2341 gen_op_set_cc_op(s->cc_op);
2342 gen_op_shiftd_T0_T1_ECX_cc[ot - OT_WORD][op]();
2343 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2346 gen_op_st_T0_A0[ot]();
2348 gen_op_mov_reg_T0[ot][rm]();
2352 /************************/
2355 modrm = ldub(s->pc++);
2356 mod = (modrm >> 6) & 3;
2358 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2362 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2364 case 0x00 ... 0x07: /* fxxxs */
2365 case 0x10 ... 0x17: /* fixxxl */
2366 case 0x20 ... 0x27: /* fxxxl */
2367 case 0x30 ... 0x37: /* fixxx */
2374 gen_op_flds_FT0_A0();
2377 gen_op_fildl_FT0_A0();
2380 gen_op_fldl_FT0_A0();
2384 gen_op_fild_FT0_A0();
2388 gen_op_fp_arith_ST0_FT0[op1]();
2390 /* fcomp needs pop */
2395 case 0x08: /* flds */
2396 case 0x0a: /* fsts */
2397 case 0x0b: /* fstps */
2398 case 0x18: /* fildl */
2399 case 0x1a: /* fistl */
2400 case 0x1b: /* fistpl */
2401 case 0x28: /* fldl */
2402 case 0x2a: /* fstl */
2403 case 0x2b: /* fstpl */
2404 case 0x38: /* filds */
2405 case 0x3a: /* fists */
2406 case 0x3b: /* fistps */
2413 gen_op_flds_ST0_A0();
2416 gen_op_fildl_ST0_A0();
2419 gen_op_fldl_ST0_A0();
2423 gen_op_fild_ST0_A0();
2430 gen_op_fsts_ST0_A0();
2433 gen_op_fistl_ST0_A0();
2436 gen_op_fstl_ST0_A0();
2440 gen_op_fist_ST0_A0();
2448 case 0x0c: /* fldenv mem */
2449 gen_op_fldenv_A0(s->dflag);
2451 case 0x0d: /* fldcw mem */
2454 case 0x0e: /* fnstenv mem */
2455 gen_op_fnstenv_A0(s->dflag);
2457 case 0x0f: /* fnstcw mem */
2460 case 0x1d: /* fldt mem */
2462 gen_op_fldt_ST0_A0();
2464 case 0x1f: /* fstpt mem */
2465 gen_op_fstt_ST0_A0();
2468 case 0x2c: /* frstor mem */
2469 gen_op_frstor_A0(s->dflag);
2471 case 0x2e: /* fnsave mem */
2472 gen_op_fnsave_A0(s->dflag);
2474 case 0x2f: /* fnstsw mem */
2477 case 0x3c: /* fbld */
2479 gen_op_fbld_ST0_A0();
2481 case 0x3e: /* fbstp */
2482 gen_op_fbst_ST0_A0();
2485 case 0x3d: /* fildll */
2487 gen_op_fildll_ST0_A0();
2489 case 0x3f: /* fistpll */
2490 gen_op_fistll_ST0_A0();
2497 /* register float ops */
2501 case 0x08: /* fld sti */
2503 gen_op_fmov_ST0_STN((opreg + 1) & 7);
2505 case 0x09: /* fxchg sti */
2506 gen_op_fxchg_ST0_STN(opreg);
2508 case 0x0a: /* grp d9/2 */
2516 case 0x0c: /* grp d9/4 */
2526 gen_op_fcom_ST0_FT0();
2535 case 0x0d: /* grp d9/5 */
2544 gen_op_fldl2t_ST0();
2548 gen_op_fldl2e_ST0();
2556 gen_op_fldlg2_ST0();
2560 gen_op_fldln2_ST0();
2571 case 0x0e: /* grp d9/6 */
2582 case 3: /* fpatan */
2585 case 4: /* fxtract */
2588 case 5: /* fprem1 */
2591 case 6: /* fdecstp */
2595 case 7: /* fincstp */
2600 case 0x0f: /* grp d9/7 */
2605 case 1: /* fyl2xp1 */
2611 case 3: /* fsincos */
2614 case 5: /* fscale */
2617 case 4: /* frndint */
2629 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
2630 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
2631 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
2637 gen_op_fp_arith_STN_ST0[op1](opreg);
2641 gen_op_fmov_FT0_STN(opreg);
2642 gen_op_fp_arith_ST0_FT0[op1]();
2646 case 0x02: /* fcom */
2647 gen_op_fmov_FT0_STN(opreg);
2648 gen_op_fcom_ST0_FT0();
2650 case 0x03: /* fcomp */
2651 gen_op_fmov_FT0_STN(opreg);
2652 gen_op_fcom_ST0_FT0();
2655 case 0x15: /* da/5 */
2657 case 1: /* fucompp */
2658 gen_op_fmov_FT0_STN(1);
2659 gen_op_fucom_ST0_FT0();
2669 case 0: /* feni (287 only, just do nop here) */
2671 case 1: /* fdisi (287 only, just do nop here) */
2676 case 3: /* fninit */
2679 case 4: /* fsetpm (287 only, just do nop here) */
2685 case 0x1d: /* fucomi */
2686 if (s->cc_op != CC_OP_DYNAMIC)
2687 gen_op_set_cc_op(s->cc_op);
2688 gen_op_fmov_FT0_STN(opreg);
2689 gen_op_fucomi_ST0_FT0();
2690 s->cc_op = CC_OP_EFLAGS;
2692 case 0x1e: /* fcomi */
2693 if (s->cc_op != CC_OP_DYNAMIC)
2694 gen_op_set_cc_op(s->cc_op);
2695 gen_op_fmov_FT0_STN(opreg);
2696 gen_op_fcomi_ST0_FT0();
2697 s->cc_op = CC_OP_EFLAGS;
2699 case 0x2a: /* fst sti */
2700 gen_op_fmov_STN_ST0(opreg);
2702 case 0x2b: /* fstp sti */
2703 gen_op_fmov_STN_ST0(opreg);
2706 case 0x2c: /* fucom st(i) */
2707 gen_op_fmov_FT0_STN(opreg);
2708 gen_op_fucom_ST0_FT0();
2710 case 0x2d: /* fucomp st(i) */
2711 gen_op_fmov_FT0_STN(opreg);
2712 gen_op_fucom_ST0_FT0();
2715 case 0x33: /* de/3 */
2717 case 1: /* fcompp */
2718 gen_op_fmov_FT0_STN(1);
2719 gen_op_fcom_ST0_FT0();
2727 case 0x3c: /* df/4 */
2730 gen_op_fnstsw_EAX();
2736 case 0x3d: /* fucomip */
2737 if (s->cc_op != CC_OP_DYNAMIC)
2738 gen_op_set_cc_op(s->cc_op);
2739 gen_op_fmov_FT0_STN(opreg);
2740 gen_op_fucomi_ST0_FT0();
2742 s->cc_op = CC_OP_EFLAGS;
2744 case 0x3e: /* fcomip */
2745 if (s->cc_op != CC_OP_DYNAMIC)
2746 gen_op_set_cc_op(s->cc_op);
2747 gen_op_fmov_FT0_STN(opreg);
2748 gen_op_fcomi_ST0_FT0();
2750 s->cc_op = CC_OP_EFLAGS;
2757 /************************/
2760 case 0xa4: /* movsS */
2765 ot = dflag ? OT_LONG : OT_WORD;
2767 if (prefixes & PREFIX_REPZ) {
2768 gen_string_ds(s, ot, gen_op_movs + 9);
2770 gen_string_ds(s, ot, gen_op_movs);
2774 case 0xaa: /* stosS */
2779 ot = dflag ? OT_LONG : OT_WORD;
2781 if (prefixes & PREFIX_REPZ) {
2782 gen_string_es(s, ot, gen_op_stos + 9);
2784 gen_string_es(s, ot, gen_op_stos);
2787 case 0xac: /* lodsS */
2792 ot = dflag ? OT_LONG : OT_WORD;
2793 if (prefixes & PREFIX_REPZ) {
2794 gen_string_ds(s, ot, gen_op_lods + 9);
2796 gen_string_ds(s, ot, gen_op_lods);
2799 case 0xae: /* scasS */
2804 ot = dflag ? OT_LONG : OT_WORD;
2805 if (prefixes & PREFIX_REPNZ) {
2806 if (s->cc_op != CC_OP_DYNAMIC)
2807 gen_op_set_cc_op(s->cc_op);
2808 gen_string_es(s, ot, gen_op_scas + 9 * 2);
2809 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2810 } else if (prefixes & PREFIX_REPZ) {
2811 if (s->cc_op != CC_OP_DYNAMIC)
2812 gen_op_set_cc_op(s->cc_op);
2813 gen_string_es(s, ot, gen_op_scas + 9);
2814 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2816 gen_string_es(s, ot, gen_op_scas);
2817 s->cc_op = CC_OP_SUBB + ot;
2821 case 0xa6: /* cmpsS */
2826 ot = dflag ? OT_LONG : OT_WORD;
2827 if (prefixes & PREFIX_REPNZ) {
2828 if (s->cc_op != CC_OP_DYNAMIC)
2829 gen_op_set_cc_op(s->cc_op);
2830 gen_string_ds(s, ot, gen_op_cmps + 9 * 2);
2831 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2832 } else if (prefixes & PREFIX_REPZ) {
2833 if (s->cc_op != CC_OP_DYNAMIC)
2834 gen_op_set_cc_op(s->cc_op);
2835 gen_string_ds(s, ot, gen_op_cmps + 9);
2836 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2838 gen_string_ds(s, ot, gen_op_cmps);
2839 s->cc_op = CC_OP_SUBB + ot;
2842 case 0x6c: /* insS */
2844 if (s->cpl > s->iopl || s->vm86) {
2845 /* NOTE: even for (E)CX = 0 the exception is raised */
2846 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2851 ot = dflag ? OT_LONG : OT_WORD;
2852 if (prefixes & PREFIX_REPZ) {
2853 gen_string_es(s, ot, gen_op_ins + 9);
2855 gen_string_es(s, ot, gen_op_ins);
2859 case 0x6e: /* outsS */
2861 if (s->cpl > s->iopl || s->vm86) {
2862 /* NOTE: even for (E)CX = 0 the exception is raised */
2863 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2868 ot = dflag ? OT_LONG : OT_WORD;
2869 if (prefixes & PREFIX_REPZ) {
2870 gen_string_ds(s, ot, gen_op_outs + 9);
2872 gen_string_ds(s, ot, gen_op_outs);
2877 /************************/
2881 if (s->cpl > s->iopl || s->vm86) {
2882 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2887 ot = dflag ? OT_LONG : OT_WORD;
2888 val = ldub(s->pc++);
2889 gen_op_movl_T0_im(val);
2891 gen_op_mov_reg_T1[ot][R_EAX]();
2896 if (s->cpl > s->iopl || s->vm86) {
2897 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2902 ot = dflag ? OT_LONG : OT_WORD;
2903 val = ldub(s->pc++);
2904 gen_op_movl_T0_im(val);
2905 gen_op_mov_TN_reg[ot][1][R_EAX]();
2911 if (s->cpl > s->iopl || s->vm86) {
2912 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2917 ot = dflag ? OT_LONG : OT_WORD;
2918 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
2920 gen_op_mov_reg_T1[ot][R_EAX]();
2925 if (s->cpl > s->iopl || s->vm86) {
2926 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2931 ot = dflag ? OT_LONG : OT_WORD;
2932 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
2933 gen_op_mov_TN_reg[ot][1][R_EAX]();
2938 /************************/
2940 case 0xc2: /* ret im */
2944 gen_stack_update(s, val + (2 << s->dflag));
2946 gen_op_andl_T0_ffff();
2950 case 0xc3: /* ret */
2954 gen_op_andl_T0_ffff();
2958 case 0xca: /* lret im */
2964 gen_op_ld_T0_A0[1 + s->dflag]();
2966 gen_op_andl_T0_ffff();
2967 /* NOTE: keeping EIP updated is not a problem in case of
2971 gen_op_addl_A0_im(2 << s->dflag);
2972 gen_op_ld_T0_A0[1 + s->dflag]();
2973 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
2974 /* add stack offset */
2975 gen_stack_update(s, val + (4 << s->dflag));
2978 case 0xcb: /* lret */
2981 case 0xcf: /* iret */
2982 if (s->vm86 && s->iopl != 3) {
2983 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
2985 if (s->cc_op != CC_OP_DYNAMIC)
2986 gen_op_set_cc_op(s->cc_op);
2987 gen_op_jmp_im(pc_start - s->cs_base);
2988 gen_op_iret_protected(s->dflag);
2989 s->cc_op = CC_OP_EFLAGS;
2993 case 0xe8: /* call im */
2995 unsigned int next_eip;
2996 ot = dflag ? OT_LONG : OT_WORD;
2997 val = insn_get(s, ot);
2998 next_eip = s->pc - s->cs_base;
3002 gen_op_movl_T0_im(next_eip);
3007 case 0x9a: /* lcall im */
3009 unsigned int selector, offset;
3010 /* XXX: not restartable */
3012 ot = dflag ? OT_LONG : OT_WORD;
3013 offset = insn_get(s, ot);
3014 selector = insn_get(s, OT_WORD);
3016 /* push return segment + offset */
3017 gen_op_movl_T0_seg(R_CS);
3019 next_eip = s->pc - s->cs_base;
3020 gen_op_movl_T0_im(next_eip);
3023 /* change cs and pc */
3024 gen_op_movl_T0_im(selector);
3025 gen_movl_seg_T0(s, R_CS, pc_start - s->cs_base);
3026 gen_op_jmp_im((unsigned long)offset);
3030 case 0xe9: /* jmp */
3031 ot = dflag ? OT_LONG : OT_WORD;
3032 val = insn_get(s, ot);
3033 val += s->pc - s->cs_base;
3038 case 0xea: /* ljmp im */
3040 unsigned int selector, offset;
3042 ot = dflag ? OT_LONG : OT_WORD;
3043 offset = insn_get(s, ot);
3044 selector = insn_get(s, OT_WORD);
3046 /* change cs and pc */
3047 gen_op_movl_T0_im(selector);
3049 /* we compute EIP to handle the exception case */
3050 gen_op_jmp_im(pc_start - s->cs_base);
3051 gen_op_movl_T1_im(offset);
3052 gen_op_ljmp_T0_T1();
3054 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3055 gen_op_jmp_im((unsigned long)offset);
3060 case 0xeb: /* jmp Jb */
3061 val = (int8_t)insn_get(s, OT_BYTE);
3062 val += s->pc - s->cs_base;
3067 case 0x70 ... 0x7f: /* jcc Jb */
3068 val = (int8_t)insn_get(s, OT_BYTE);
3070 case 0x180 ... 0x18f: /* jcc Jv */
3072 val = insn_get(s, OT_LONG);
3074 val = (int16_t)insn_get(s, OT_WORD);
3077 next_eip = s->pc - s->cs_base;
3081 gen_jcc(s, b, val, next_eip);
3084 case 0x190 ... 0x19f: /* setcc Gv */
3085 modrm = ldub(s->pc++);
3087 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
3089 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3090 ot = dflag ? OT_LONG : OT_WORD;
3091 modrm = ldub(s->pc++);
3092 reg = (modrm >> 3) & 7;
3093 mod = (modrm >> 6) & 3;
3096 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3097 gen_op_ld_T1_A0[ot]();
3100 gen_op_mov_TN_reg[ot][1][rm]();
3102 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
3105 /************************/
3107 case 0x9c: /* pushf */
3108 if (s->vm86 && s->iopl != 3) {
3109 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3111 if (s->cc_op != CC_OP_DYNAMIC)
3112 gen_op_set_cc_op(s->cc_op);
3113 gen_op_movl_T0_eflags();
3117 case 0x9d: /* popf */
3118 if (s->vm86 && s->iopl != 3) {
3119 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3124 gen_op_movl_eflags_T0_cpl0();
3126 gen_op_movw_eflags_T0_cpl0();
3130 gen_op_movl_eflags_T0();
3132 gen_op_movw_eflags_T0();
3136 s->cc_op = CC_OP_EFLAGS;
3137 s->is_jmp = 2; /* abort translation because TF flag may change */
3140 case 0x9e: /* sahf */
3141 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
3142 if (s->cc_op != CC_OP_DYNAMIC)
3143 gen_op_set_cc_op(s->cc_op);
3144 gen_op_movb_eflags_T0();
3145 s->cc_op = CC_OP_EFLAGS;
3147 case 0x9f: /* lahf */
3148 if (s->cc_op != CC_OP_DYNAMIC)
3149 gen_op_set_cc_op(s->cc_op);
3150 gen_op_movl_T0_eflags();
3151 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
3153 case 0xf5: /* cmc */
3154 if (s->cc_op != CC_OP_DYNAMIC)
3155 gen_op_set_cc_op(s->cc_op);
3157 s->cc_op = CC_OP_EFLAGS;
3159 case 0xf8: /* clc */
3160 if (s->cc_op != CC_OP_DYNAMIC)
3161 gen_op_set_cc_op(s->cc_op);
3163 s->cc_op = CC_OP_EFLAGS;
3165 case 0xf9: /* stc */
3166 if (s->cc_op != CC_OP_DYNAMIC)
3167 gen_op_set_cc_op(s->cc_op);
3169 s->cc_op = CC_OP_EFLAGS;
3171 case 0xfc: /* cld */
3174 case 0xfd: /* std */
3178 /************************/
3179 /* bit operations */
3180 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3181 ot = dflag ? OT_LONG : OT_WORD;
3182 modrm = ldub(s->pc++);
3183 op = (modrm >> 3) & 7;
3184 mod = (modrm >> 6) & 3;
3187 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3188 gen_op_ld_T0_A0[ot]();
3190 gen_op_mov_TN_reg[ot][0][rm]();
3193 val = ldub(s->pc++);
3194 gen_op_movl_T1_im(val);
3198 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3199 s->cc_op = CC_OP_SARB + ot;
3202 gen_op_st_T0_A0[ot]();
3204 gen_op_mov_reg_T0[ot][rm]();
3207 case 0x1a3: /* bt Gv, Ev */
3210 case 0x1ab: /* bts */
3213 case 0x1b3: /* btr */
3216 case 0x1bb: /* btc */
3219 ot = dflag ? OT_LONG : OT_WORD;
3220 modrm = ldub(s->pc++);
3221 reg = (modrm >> 3) & 7;
3222 mod = (modrm >> 6) & 3;
3224 gen_op_mov_TN_reg[OT_LONG][1][reg]();
3226 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3227 /* specific case: we need to add a displacement */
3229 gen_op_add_bitw_A0_T1();
3231 gen_op_add_bitl_A0_T1();
3232 gen_op_ld_T0_A0[ot]();
3234 gen_op_mov_TN_reg[ot][0][rm]();
3236 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
3237 s->cc_op = CC_OP_SARB + ot;
3240 gen_op_st_T0_A0[ot]();
3242 gen_op_mov_reg_T0[ot][rm]();
3245 case 0x1bc: /* bsf */
3246 case 0x1bd: /* bsr */
3247 ot = dflag ? OT_LONG : OT_WORD;
3248 modrm = ldub(s->pc++);
3249 reg = (modrm >> 3) & 7;
3250 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3251 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
3252 /* NOTE: we always write back the result. Intel doc says it is
3253 undefined if T0 == 0 */
3254 gen_op_mov_reg_T0[ot][reg]();
3255 s->cc_op = CC_OP_LOGICB + ot;
3257 /************************/
3259 case 0x27: /* daa */
3260 if (s->cc_op != CC_OP_DYNAMIC)
3261 gen_op_set_cc_op(s->cc_op);
3263 s->cc_op = CC_OP_EFLAGS;
3265 case 0x2f: /* das */
3266 if (s->cc_op != CC_OP_DYNAMIC)
3267 gen_op_set_cc_op(s->cc_op);
3269 s->cc_op = CC_OP_EFLAGS;
3271 case 0x37: /* aaa */
3272 if (s->cc_op != CC_OP_DYNAMIC)
3273 gen_op_set_cc_op(s->cc_op);
3275 s->cc_op = CC_OP_EFLAGS;
3277 case 0x3f: /* aas */
3278 if (s->cc_op != CC_OP_DYNAMIC)
3279 gen_op_set_cc_op(s->cc_op);
3281 s->cc_op = CC_OP_EFLAGS;
3283 case 0xd4: /* aam */
3284 val = ldub(s->pc++);
3286 s->cc_op = CC_OP_LOGICB;
3288 case 0xd5: /* aad */
3289 val = ldub(s->pc++);
3291 s->cc_op = CC_OP_LOGICB;
3293 /************************/
3295 case 0x90: /* nop */
3297 case 0x9b: /* fwait */
3299 case 0xcc: /* int3 */
3300 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
3302 case 0xcd: /* int N */
3303 val = ldub(s->pc++);
3304 /* XXX: add error code for vm86 GPF */
3306 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
3308 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3310 case 0xce: /* into */
3311 if (s->cc_op != CC_OP_DYNAMIC)
3312 gen_op_set_cc_op(s->cc_op);
3313 gen_op_into(s->pc - s->cs_base);
3315 case 0xfa: /* cli */
3317 if (s->cpl <= s->iopl) {
3320 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3326 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3330 case 0xfb: /* sti */
3332 if (s->cpl <= s->iopl) {
3334 s->is_jmp = 2; /* give a chance to handle pending irqs */
3336 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3341 s->is_jmp = 2; /* give a chance to handle pending irqs */
3343 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3347 case 0x62: /* bound */
3348 ot = dflag ? OT_LONG : OT_WORD;
3349 modrm = ldub(s->pc++);
3350 reg = (modrm >> 3) & 7;
3351 mod = (modrm >> 6) & 3;
3354 gen_op_mov_reg_T0[ot][reg]();
3355 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3357 gen_op_boundw(pc_start - s->cs_base);
3359 gen_op_boundl(pc_start - s->cs_base);
3361 case 0x1c8 ... 0x1cf: /* bswap reg */
3363 gen_op_mov_TN_reg[OT_LONG][0][reg]();
3365 gen_op_mov_reg_T0[OT_LONG][reg]();
3367 case 0xd6: /* salc */
3368 if (s->cc_op != CC_OP_DYNAMIC)
3369 gen_op_set_cc_op(s->cc_op);
3372 case 0xe0: /* loopnz */
3373 case 0xe1: /* loopz */
3374 if (s->cc_op != CC_OP_DYNAMIC)
3375 gen_op_set_cc_op(s->cc_op);
3377 case 0xe2: /* loop */
3378 case 0xe3: /* jecxz */
3379 val = (int8_t)insn_get(s, OT_BYTE);
3380 next_eip = s->pc - s->cs_base;
3384 gen_op_loop[s->aflag][b & 3](val, next_eip);
3387 case 0x131: /* rdtsc */
3390 case 0x1a2: /* cpuid */
3393 case 0xf4: /* hlt */
3395 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3397 if (s->cc_op != CC_OP_DYNAMIC)
3398 gen_op_set_cc_op(s->cc_op);
3399 gen_op_jmp_im(s->pc - s->cs_base);
3405 modrm = ldub(s->pc++);
3406 mod = (modrm >> 6) & 3;
3407 op = (modrm >> 3) & 7;
3410 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
3414 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3418 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3420 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3421 gen_op_jmp_im(pc_start - s->cs_base);
3426 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
3430 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3434 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3436 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3437 gen_op_jmp_im(pc_start - s->cs_base);
3448 modrm = ldub(s->pc++);
3449 mod = (modrm >> 6) & 3;
3450 op = (modrm >> 3) & 7;
3456 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3458 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
3460 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
3462 gen_op_addl_A0_im(2);
3464 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.base));
3466 gen_op_movl_T0_env(offsetof(CPUX86State,idt.base));
3468 gen_op_andl_T0_im(0xffffff);
3476 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3478 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3479 gen_op_lduw_T1_A0();
3480 gen_op_addl_A0_im(2);
3483 gen_op_andl_T0_im(0xffffff);
3485 gen_op_movl_env_T0(offsetof(CPUX86State,gdt.base));
3486 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
3488 gen_op_movl_env_T0(offsetof(CPUX86State,idt.base));
3489 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
3494 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
3495 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
3499 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3501 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3505 case 7: /* invlpg */
3507 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3511 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3519 case 0x102: /* lar */
3520 case 0x103: /* lsl */
3523 ot = dflag ? OT_LONG : OT_WORD;
3524 modrm = ldub(s->pc++);
3525 reg = (modrm >> 3) & 7;
3526 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3527 gen_op_mov_TN_reg[ot][1][reg]();
3528 if (s->cc_op != CC_OP_DYNAMIC)
3529 gen_op_set_cc_op(s->cc_op);
3534 s->cc_op = CC_OP_EFLAGS;
3535 gen_op_mov_reg_T1[ot][reg]();
3538 modrm = ldub(s->pc++);
3539 mod = (modrm >> 6) & 3;
3540 op = (modrm >> 3) & 7;
3542 case 0: /* prefetchnta */
3543 case 1: /* prefetchnt0 */
3544 case 2: /* prefetchnt0 */
3545 case 3: /* prefetchnt0 */
3548 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3549 /* nothing more to do */
3555 case 0x120: /* mov reg, crN */
3556 case 0x122: /* mov crN, reg */
3558 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3560 modrm = ldub(s->pc++);
3561 if ((modrm & 0xc0) != 0xc0)
3564 reg = (modrm >> 3) & 7;
3571 gen_op_mov_TN_reg[OT_LONG][0][rm]();
3572 gen_op_movl_crN_T0(reg);
3575 gen_op_movl_T0_env(offsetof(CPUX86State,cr[reg]));
3576 gen_op_mov_reg_T0[OT_LONG][rm]();
3584 case 0x121: /* mov reg, drN */
3585 case 0x123: /* mov drN, reg */
3587 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3589 modrm = ldub(s->pc++);
3590 if ((modrm & 0xc0) != 0xc0)
3593 reg = (modrm >> 3) & 7;
3594 /* XXX: do it dynamically with CR4.DE bit */
3595 if (reg == 4 || reg == 5)
3598 gen_op_mov_TN_reg[OT_LONG][0][rm]();
3599 gen_op_movl_drN_T0(reg);
3602 gen_op_movl_T0_env(offsetof(CPUX86State,dr[reg]));
3603 gen_op_mov_reg_T0[OT_LONG][rm]();
3607 case 0x106: /* clts */
3609 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
3617 /* lock generation */
3618 if (s->prefix & PREFIX_LOCK)
3622 /* XXX: ensure that no lock was generated */
3626 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
3627 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
3629 /* flags read by an operation */
/* Table indexed by micro-op (INDEX_op_*): the x86 eflags bits each op
   READS (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C masks).  Entries not listed
   default to 0 = "reads no flags".  Consumed by optimize_flags() for
   the backward flag-liveness pass.
   NOTE(review): some original lines are elided in this extract. */
3630 static uint16_t opc_read_flags[NB_OPS] = {
3631 [INDEX_op_aas] = CC_A,
3632 [INDEX_op_aaa] = CC_A,
3633 [INDEX_op_das] = CC_A | CC_C,
3634 [INDEX_op_daa] = CC_A | CC_C,
/* add/sub with carry consume only C */
3636 [INDEX_op_adcb_T0_T1_cc] = CC_C,
3637 [INDEX_op_adcw_T0_T1_cc] = CC_C,
3638 [INDEX_op_adcl_T0_T1_cc] = CC_C,
3639 [INDEX_op_sbbb_T0_T1_cc] = CC_C,
3640 [INDEX_op_sbbw_T0_T1_cc] = CC_C,
3641 [INDEX_op_sbbl_T0_T1_cc] = CC_C,
3643 /* subtle: due to the incl/decl implementation, C is used */
3644 [INDEX_op_update_inc_cc] = CC_C,
3646 [INDEX_op_into] = CC_O,
/* conditional jumps read only the flags their condition tests */
3648 [INDEX_op_jb_subb] = CC_C,
3649 [INDEX_op_jb_subw] = CC_C,
3650 [INDEX_op_jb_subl] = CC_C,
3652 [INDEX_op_jz_subb] = CC_Z,
3653 [INDEX_op_jz_subw] = CC_Z,
3654 [INDEX_op_jz_subl] = CC_Z,
3656 [INDEX_op_jbe_subb] = CC_Z | CC_C,
3657 [INDEX_op_jbe_subw] = CC_Z | CC_C,
3658 [INDEX_op_jbe_subl] = CC_Z | CC_C,
3660 [INDEX_op_js_subb] = CC_S,
3661 [INDEX_op_js_subw] = CC_S,
3662 [INDEX_op_js_subl] = CC_S,
3664 [INDEX_op_jl_subb] = CC_O | CC_S,
3665 [INDEX_op_jl_subw] = CC_O | CC_S,
3666 [INDEX_op_jl_subl] = CC_O | CC_S,
3668 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
3669 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
3670 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
/* loopnz/loopz test ZF in addition to (E)CX */
3672 [INDEX_op_loopnzw] = CC_Z,
3673 [INDEX_op_loopnzl] = CC_Z,
3674 [INDEX_op_loopzw] = CC_Z,
3675 [INDEX_op_loopzl] = CC_Z,
/* setcc variants, same per-condition flag usage as jcc */
3677 [INDEX_op_seto_T0_cc] = CC_O,
3678 [INDEX_op_setb_T0_cc] = CC_C,
3679 [INDEX_op_setz_T0_cc] = CC_Z,
3680 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
3681 [INDEX_op_sets_T0_cc] = CC_S,
3682 [INDEX_op_setp_T0_cc] = CC_P,
3683 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
3684 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
3686 [INDEX_op_setb_T0_subb] = CC_C,
3687 [INDEX_op_setb_T0_subw] = CC_C,
3688 [INDEX_op_setb_T0_subl] = CC_C,
3690 [INDEX_op_setz_T0_subb] = CC_Z,
3691 [INDEX_op_setz_T0_subw] = CC_Z,
3692 [INDEX_op_setz_T0_subl] = CC_Z,
3694 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
3695 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
3696 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
3698 [INDEX_op_sets_T0_subb] = CC_S,
3699 [INDEX_op_sets_T0_subw] = CC_S,
3700 [INDEX_op_sets_T0_subl] = CC_S,
3702 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
3703 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
3704 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
3706 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
3707 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
3708 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
/* pushf/lahf materialize the whole flag set */
3710 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
3711 [INDEX_op_cmc] = CC_C,
3712 [INDEX_op_salc] = CC_C,
/* rotate-through-carry reads the incoming C flag */
3714 [INDEX_op_rclb_T0_T1_cc] = CC_C,
3715 [INDEX_op_rclw_T0_T1_cc] = CC_C,
3716 [INDEX_op_rcll_T0_T1_cc] = CC_C,
3717 [INDEX_op_rcrb_T0_T1_cc] = CC_C,
3718 [INDEX_op_rcrw_T0_T1_cc] = CC_C,
3719 [INDEX_op_rcrl_T0_T1_cc] = CC_C,
3722 /* flags written by an operation */
/* Table indexed by micro-op: the eflags bits each op WRITES.  Entries
   not listed default to 0 = "writes no flags".  optimize_flags() uses
   this to decide when a _cc op can be replaced by its flag-less
   variant from opc_simpler[].
   NOTE(review): some original lines are elided in this extract. */
3723 static uint16_t opc_write_flags[NB_OPS] = {
3724 [INDEX_op_update2_cc] = CC_OSZAPC,
3725 [INDEX_op_update1_cc] = CC_OSZAPC,
3726 [INDEX_op_adcb_T0_T1_cc] = CC_OSZAPC,
3727 [INDEX_op_adcw_T0_T1_cc] = CC_OSZAPC,
3728 [INDEX_op_adcl_T0_T1_cc] = CC_OSZAPC,
3729 [INDEX_op_sbbb_T0_T1_cc] = CC_OSZAPC,
3730 [INDEX_op_sbbw_T0_T1_cc] = CC_OSZAPC,
3731 [INDEX_op_sbbl_T0_T1_cc] = CC_OSZAPC,
3732 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
3733 [INDEX_op_update_neg_cc] = CC_OSZAPC,
3734 /* subtle: due to the incl/decl implementation, C is used */
3735 [INDEX_op_update_inc_cc] = CC_OSZAPC,
3736 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
3738 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
3739 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
3740 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
3741 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
3742 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
3743 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
3744 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
3745 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
/* BCD adjust instructions */
3748 [INDEX_op_aam] = CC_OSZAPC,
3749 [INDEX_op_aad] = CC_OSZAPC,
3750 [INDEX_op_aas] = CC_OSZAPC,
3751 [INDEX_op_aaa] = CC_OSZAPC,
3752 [INDEX_op_das] = CC_OSZAPC,
3753 [INDEX_op_daa] = CC_OSZAPC,
/* sahf writes only the low (byte) flags, not O */
3755 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
3756 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
3757 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
3758 [INDEX_op_clc] = CC_C,
3759 [INDEX_op_stc] = CC_C,
3760 [INDEX_op_cmc] = CC_C,
/* rotates only touch O and C */
3762 [INDEX_op_rolb_T0_T1_cc] = CC_O | CC_C,
3763 [INDEX_op_rolw_T0_T1_cc] = CC_O | CC_C,
3764 [INDEX_op_roll_T0_T1_cc] = CC_O | CC_C,
3765 [INDEX_op_rorb_T0_T1_cc] = CC_O | CC_C,
3766 [INDEX_op_rorw_T0_T1_cc] = CC_O | CC_C,
3767 [INDEX_op_rorl_T0_T1_cc] = CC_O | CC_C,
3769 [INDEX_op_rclb_T0_T1_cc] = CC_O | CC_C,
3770 [INDEX_op_rclw_T0_T1_cc] = CC_O | CC_C,
3771 [INDEX_op_rcll_T0_T1_cc] = CC_O | CC_C,
3772 [INDEX_op_rcrb_T0_T1_cc] = CC_O | CC_C,
3773 [INDEX_op_rcrw_T0_T1_cc] = CC_O | CC_C,
3774 [INDEX_op_rcrl_T0_T1_cc] = CC_O | CC_C,
3776 [INDEX_op_shlb_T0_T1_cc] = CC_OSZAPC,
3777 [INDEX_op_shlw_T0_T1_cc] = CC_OSZAPC,
3778 [INDEX_op_shll_T0_T1_cc] = CC_OSZAPC,
3780 [INDEX_op_shrb_T0_T1_cc] = CC_OSZAPC,
3781 [INDEX_op_shrw_T0_T1_cc] = CC_OSZAPC,
3782 [INDEX_op_shrl_T0_T1_cc] = CC_OSZAPC,
3784 [INDEX_op_sarb_T0_T1_cc] = CC_OSZAPC,
3785 [INDEX_op_sarw_T0_T1_cc] = CC_OSZAPC,
3786 [INDEX_op_sarl_T0_T1_cc] = CC_OSZAPC,
3788 [INDEX_op_shldw_T0_T1_ECX_cc] = CC_OSZAPC,
3789 [INDEX_op_shldl_T0_T1_ECX_cc] = CC_OSZAPC,
3790 [INDEX_op_shldw_T0_T1_im_cc] = CC_OSZAPC,
3791 [INDEX_op_shldl_T0_T1_im_cc] = CC_OSZAPC,
3793 [INDEX_op_shrdw_T0_T1_ECX_cc] = CC_OSZAPC,
3794 [INDEX_op_shrdl_T0_T1_ECX_cc] = CC_OSZAPC,
3795 [INDEX_op_shrdw_T0_T1_im_cc] = CC_OSZAPC,
3796 [INDEX_op_shrdl_T0_T1_im_cc] = CC_OSZAPC,
3798 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
3799 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
3800 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
3801 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
3802 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
3803 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
3804 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
3805 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
3807 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
3808 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
3809 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
3810 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
/* helper macro: expands to one entry per addressing/size variant of a
   string micro-op (b/w/l x fast/a32/a16) */
3813 #define STRINGOP(x) \
3814 [INDEX_op_ ## x ## b_fast] = CC_OSZAPC, \
3815 [INDEX_op_ ## x ## w_fast] = CC_OSZAPC, \
3816 [INDEX_op_ ## x ## l_fast] = CC_OSZAPC, \
3817 [INDEX_op_ ## x ## b_a32] = CC_OSZAPC, \
3818 [INDEX_op_ ## x ## w_a32] = CC_OSZAPC, \
3819 [INDEX_op_ ## x ## l_a32] = CC_OSZAPC, \
3820 [INDEX_op_ ## x ## b_a16] = CC_OSZAPC, \
3821 [INDEX_op_ ## x ## w_a16] = CC_OSZAPC, \
3822 [INDEX_op_ ## x ## l_a16] = CC_OSZAPC,
3826 STRINGOP(repnz_scas)
3829 STRINGOP(repnz_cmps)
3831 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
3832 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
3833 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
3835 [INDEX_op_cmpxchg8b] = CC_Z,
3836 [INDEX_op_lar] = CC_Z,
3837 [INDEX_op_lsl] = CC_Z,
3838 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
3839 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
3842 /* simpler form of an operation if no flags need to be generated */
/* For each micro-op, the cheaper replacement to use when none of the
   flags it would write are live: either a plain nop (pure flag
   updates) or the non-_cc variant of the same shift/rotate.  A 0
   entry means "no simpler form"; optimize_flags_init() fills those
   defaults in (that code is elided from this extract). */
3843 static uint16_t opc_simpler[NB_OPS] = {
3844 [INDEX_op_update2_cc] = INDEX_op_nop,
3845 [INDEX_op_update1_cc] = INDEX_op_nop,
3846 [INDEX_op_update_neg_cc] = INDEX_op_nop,
3847 [INDEX_op_update_inc_cc] = INDEX_op_nop,
3849 [INDEX_op_rolb_T0_T1_cc] = INDEX_op_rolb_T0_T1,
3850 [INDEX_op_rolw_T0_T1_cc] = INDEX_op_rolw_T0_T1,
3851 [INDEX_op_roll_T0_T1_cc] = INDEX_op_roll_T0_T1,
3853 [INDEX_op_rorb_T0_T1_cc] = INDEX_op_rorb_T0_T1,
3854 [INDEX_op_rorw_T0_T1_cc] = INDEX_op_rorw_T0_T1,
3855 [INDEX_op_rorl_T0_T1_cc] = INDEX_op_rorl_T0_T1,
3857 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
3858 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
3859 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
3861 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
3862 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
3863 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
3865 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
3866 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
3867 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
/* One-time setup called from cpu_x86_init(): walk opc_simpler[] and
   replace the 0 ("no simpler form") entries with a sensible default.
   NOTE(review): the loop body and the declaration of 'i' are elided
   in this extract; presumably the default is the op itself — confirm
   against the full source. */
3870 static void optimize_flags_init(void)
3873 /* put default values in arrays */
3874 for(i = 0; i < NB_OPS; i++) {
3875 if (opc_simpler[i] == 0)
3880 /* CPU flags computation optimization: we move backward thru the
3881 generated code to see which flags are needed. The operation is
3882 modified if suitable */
3883 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
3886 int live_flags, write_flags, op;
/* start one past the last op and scan backwards */
3888 opc_ptr = opc_buf + opc_buf_len;
3889 /* live_flags contains the flags needed by the next instructions
3890 in the code. At the end of the block, we consider that all the
3892 live_flags = CC_OSZAPC;
3893 while (opc_ptr > opc_buf) {
3895 /* if none of the flags written by the instruction is used,
3896 then we can try to find a simpler instruction */
3897 write_flags = opc_write_flags[op];
3898 if ((live_flags & write_flags) == 0) {
/* replace in place with the flag-less variant (or nop) */
3899 *opc_ptr = opc_simpler[op];
3901 /* compute the live flags before the instruction */
/* an op kills the flags it writes and revives the ones it reads */
3902 live_flags &= ~write_flags;
3903 live_flags |= opc_read_flags[op];
3907 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
3908 basic block 'tb'. If search_pc is TRUE, also generate PC
3909 information for each intermediate instruction. */
3910 static inline int gen_intermediate_code_internal(TranslationBlock *tb, int search_pc)
3912 DisasContext dc1, *dc = &dc1;
3914 uint16_t *gen_opc_end;
3920 /* generate intermediate code */
3921 pc_start = (uint8_t *)tb->pc;
3922 cs_base = (uint8_t *)tb->cs_base;
/* unpack the TB's CPU-state flags into the disassembly context */
3925 dc->code32 = (flags >> GEN_FLAG_CODE32_SHIFT) & 1;
3926 dc->ss32 = (flags >> GEN_FLAG_SS32_SHIFT) & 1;
3927 dc->addseg = (flags >> GEN_FLAG_ADDSEG_SHIFT) & 1;
3928 dc->f_st = (flags >> GEN_FLAG_ST_SHIFT) & 7;
3929 dc->vm86 = (flags >> GEN_FLAG_VM_SHIFT) & 1;
3930 dc->cpl = (flags >> GEN_FLAG_CPL_SHIFT) & 3;
3931 dc->iopl = (flags >> GEN_FLAG_IOPL_SHIFT) & 3;
3932 dc->tf = (flags >> GEN_FLAG_TF_SHIFT) & 1;
3933 dc->cc_op = CC_OP_DYNAMIC;
3934 dc->cs_base = cs_base;
3936 dc->popl_esp_hack = 0;
/* reset the micro-op and parameter output buffers */
3938 gen_opc_ptr = gen_opc_buf;
3939 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3940 gen_opparam_ptr = gen_opparam_buf;
3942 dc->is_jmp = DISAS_NEXT;
/* main translation loop: one guest instruction per iteration.
   NOTE(review): the loop header and some bookkeeping lines are elided
   in this extract. */
3947 j = gen_opc_ptr - gen_opc_buf;
/* in search_pc mode, record guest PC and cc_op per generated op so a
   host fault address can be mapped back to a guest instruction */
3951 gen_opc_instr_start[lj++] = 0;
3952 gen_opc_pc[lj] = (uint32_t)pc_ptr;
3953 gen_opc_cc_op[lj] = dc->cc_op;
3954 gen_opc_instr_start[lj] = 1;
3957 ret = disas_insn(dc, pc_ptr);
3959 /* we trigger an illegal instruction operation only if it
3960 is the first instruction. Otherwise, we simply stop
3961 generating the code just before it */
3962 if (pc_ptr == pc_start)
/* disas_insn returns the address of the next instruction */
3967 pc_ptr = (void *)ret;
3968 /* if single step mode, we generate only one instruction and
3969 generate an exception */
3972 } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
3973 (pc_ptr - pc_start) < (TARGET_PAGE_SIZE - 32));
3974 /* we must store the eflags state if it is not already done */
3975 if (dc->is_jmp != DISAS_TB_JUMP) {
3976 if (dc->cc_op != CC_OP_DYNAMIC)
3977 gen_op_set_cc_op(dc->cc_op);
3978 if (dc->is_jmp != DISAS_JUMP) {
3979 /* we add an additional jmp to update the simulated PC */
3980 gen_op_jmp_im(ret - (unsigned long)dc->cs_base);
/* single-step: raise the debug exception after the instruction */
3984 gen_op_raise_exception(EXCP01_SSTP);
3986 if (dc->is_jmp != 3) {
3987 /* indicate that the hash table must be used to find the next TB */
/* terminate the micro-op stream */
3990 *gen_opc_ptr = INDEX_op_end;
/* optional logging of guest input and generated micro-ops */
3994 fprintf(logfile, "----------------\n");
3995 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3996 disas(logfile, pc_start, pc_ptr - pc_start, 0, !dc->code32);
3997 fprintf(logfile, "\n");
3999 fprintf(logfile, "OP:\n");
4000 dump_ops(gen_opc_buf, gen_opparam_buf);
4001 fprintf(logfile, "\n");
4005 /* optimize flag computations */
4006 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
4010 fprintf(logfile, "AFTER FLAGS OPT:\n");
4011 dump_ops(gen_opc_buf, gen_opparam_buf);
4012 fprintf(logfile, "\n");
4016 tb->size = pc_ptr - pc_start;
/* Public entry point: translate TB without per-op PC tracking. */
4020 int gen_intermediate_code(TranslationBlock *tb)
4022 return gen_intermediate_code_internal(tb, 0);
/* Public entry point: translate TB with per-op PC tracking enabled
   (search_pc=1), used to recover the guest PC after a host fault. */
4025 int gen_intermediate_code_pc(TranslationBlock *tb)
4027 return gen_intermediate_code_internal(tb, 1);
/* Allocate and initialize a fresh CPU state: zeroed registers, default FPU
   state, IF set in eflags, and one-time static table initialization.
   Returns the new CPUX86State (return statement elided in this extract). */
4030 CPUX86State *cpu_x86_init(void)
/* NOTE(review): the lines between malloc and memset are elided -- verify a
   NULL check on env exists before it is dereferenced */
4038 env = malloc(sizeof(CPUX86State));
4041 memset(env, 0, sizeof(CPUX86State));
4042 /* basic FPU init */
4043 for(i = 0;i < 8; i++)
4046 /* flags setup : we activate the IRQs by default as in user mode */
4047 env->eflags = 0x2 | IF_MASK;
4049 /* init various static tables */
4052 optimize_flags_init();
4057 void cpu_x86_close(CPUX86State *env)
4062 /***********************************************************/
4064 /* XXX: add PGE support */
4066 /* called when cr3 or PG bit are modified */
/* cached CR0.PG state; -1 forces the first cpu_x86_update_cr0() call to
   (re)build the guest physical memory mapping */
4067 static int last_pg_state = -1;
/* host base address of the guest's physical RAM backing store */
4070 uint8_t *phys_ram_base;
/* React to a CR0 write: when the paging (PG) bit toggles, either identity-map
   the whole guest physical RAM at host address 0 (paging off) or tear that
   mapping down so faults go through the page-table walker (paging on).
   NOTE(review): several lines (pg_state declaration, the if/else split) are
   elided in this extract. */
4072 void cpu_x86_update_cr0(CPUX86State *env)
4078 printf("CR0 update: CR0=0x%08x\n", env->cr[0]);
4080 pg_state = env->cr[0] & CR0_PG_MASK;
4081 if (pg_state != last_pg_state) {
4083 /* we map the physical memory at address 0 */
/* MAP_FIXED at address 0: guest physical addresses == host virtual here */
4085 map_addr = mmap((void *)0, phys_ram_size, PROT_WRITE | PROT_READ,
4086 MAP_SHARED | MAP_FIXED, phys_ram_fd, 0);
4087 if (map_addr == MAP_FAILED) {
4089 "Could not map physical memory at host address 0x%08x\n",
4093 page_set_flags(0, phys_ram_size,
4094 PAGE_VALID | PAGE_READ | PAGE_WRITE | PAGE_EXEC);
4096 /* we unmap the physical memory */
4097 munmap((void *)0, phys_ram_size);
4098 page_set_flags(0, phys_ram_size, 0);
/* remember the new PG state so redundant CR0 writes are cheap */
4100 last_pg_state = pg_state;
/* React to a CR3 write: only relevant while paging is enabled (the actual
   TLB/mapping flush lines are elided in this extract). */
4104 void cpu_x86_update_cr3(CPUX86State *env)
4106 if (env->cr[0] & CR0_PG_MASK) {
4108 printf("CR3 update: CR3=%08x\n", env->cr[3]);
/* One-time MMU setup: establish the initial mapping state from the current
   CR0 value (preceding setup lines elided in this extract). */
4114 void cpu_x86_init_mmu(CPUX86State *env)
4117 cpu_x86_update_cr0(env);
4120 /* XXX: also flush 4MB pages */
/* Invalidate the softmmu mapping for the 4 KB page containing addr: unmap the
   host page and clear its flags so the next access re-walks the page tables. */
4121 void cpu_x86_flush_tlb(CPUX86State *env, uint32_t addr)
4124 unsigned long virt_addr;
4126 flags = page_get_flags(addr);
4127 if (flags & PAGE_VALID) {
/* round down to the 4 KB page boundary */
4128 virt_addr = addr & ~0xfff;
4129 munmap((void *)virt_addr, 4096);
4130 page_set_flags(virt_addr, virt_addr + 4096, 0);
/* Return codes of cpu_x86_handle_mmu_fault (comment delimiters elided): */
4135 -1 = cannot handle fault
4136 0 = nothing more to do
4137 1 = generate PF fault
/* Software page-table walk for a faulting guest access: reads the PDE/PTE
   from guest RAM, checks present/user/write permissions, sets accessed/dirty
   bits, then mmap()s the resolved physical page at the guest virtual address.
   NOTE(review): many lines are elided (the ldl loads of pde/pte, write-backs
   of accessed/dirty bits, fault labels) -- annotated without modification. */
4139 int cpu_x86_handle_mmu_fault(CPUX86State *env, uint32_t addr, int is_write)
4141 uint8_t *pde_ptr, *pte_ptr;
4142 uint32_t pde, pte, virt_addr;
4143 int cpl, error_code, is_dirty, is_user, prot, page_size;
/* privilege comes from the low 2 bits (RPL) of the CS selector */
4146 cpl = env->segs[R_CS].selector & 3;
4147 is_user = (cpl == 3);
4150 printf("MMU fault: addr=0x%08x w=%d u=%d eip=%08x\n",
4151 addr, is_write, is_user, env->eip);
4154 if (env->user_mode_only) {
4155 /* user mode only emulation */
/* paging disabled: physical == virtual, nothing to walk */
4160 if (!(env->cr[0] & CR0_PG_MASK))
4163 /* page directory entry */
/* PDE address: CR3 page frame + 4-byte entry indexed by addr[31:22] */
4164 pde_ptr = phys_ram_base + ((env->cr[3] & ~0xfff) + ((addr >> 20) & ~3));
4166 if (!(pde & PG_PRESENT_MASK)) {
/* user access requires the user bit; user writes additionally need RW */
4171 if (!(pde & PG_USER_MASK))
4172 goto do_fault_protect;
4173 if (is_write && !(pde & PG_RW_MASK))
4174 goto do_fault_protect;
/* supervisor path: CR0.WP makes read-only user pages fault on kernel writes */
4176 if ((env->cr[0] & CR0_WP_MASK) && (pde & PG_USER_MASK) &&
4177 is_write && !(pde & PG_RW_MASK))
4178 goto do_fault_protect;
4180 /* if PSE bit is set, then we use a 4MB page */
4181 if ((pde & PG_PSE_MASK) && (env->cr[4] & CR4_PSE_MASK)) {
4182 is_dirty = is_write && !(pde & PG_DIRTY_MASK);
4183 if (!(pde & PG_ACCESSED_MASK)) {
4184 pde |= PG_ACCESSED_MASK;
4186 pde |= PG_DIRTY_MASK;
/* 4 MB page: the PDE itself supplies the frame; no PTE level */
4190 pte = pde & ~0x003ff000; /* align to 4MB */
4191 page_size = 4096 * 1024;
4192 virt_addr = addr & ~0x003fffff;
4194 if (!(pde & PG_ACCESSED_MASK)) {
4195 pde |= PG_ACCESSED_MASK;
4199 /* page directory entry */
/* PTE address: PDE page frame + 4-byte entry indexed by addr[21:12] */
4200 pte_ptr = phys_ram_base + ((pde & ~0xfff) + ((addr >> 10) & 0xffc));
4202 if (!(pte & PG_PRESENT_MASK)) {
/* same user/RW/WP permission checks, now on the PTE */
4207 if (!(pte & PG_USER_MASK))
4208 goto do_fault_protect;
4209 if (is_write && !(pte & PG_RW_MASK))
4210 goto do_fault_protect;
4212 if ((env->cr[0] & CR0_WP_MASK) && (pte & PG_USER_MASK) &&
4213 is_write && !(pte & PG_RW_MASK))
4214 goto do_fault_protect;
4216 is_dirty = is_write && !(pte & PG_DIRTY_MASK);
4217 if (!(pte & PG_ACCESSED_MASK) || is_dirty) {
4218 pte |= PG_ACCESSED_MASK;
4220 pte |= PG_DIRTY_MASK;
4224 virt_addr = addr & ~0xfff;
4226 /* the page can be put in the TLB */
4229 if (pte & PG_RW_MASK)
/* clean pages are mapped read-only so the first write faults and sets dirty */
4232 if (!(env->cr[0] & CR0_WP_MASK) || !(pte & PG_USER_MASK) ||
/* install the translation as a host mapping of the physical page */
4236 map_addr = mmap((void *)virt_addr, page_size, prot,
4237 MAP_SHARED | MAP_FIXED, phys_ram_fd, pte & ~0xfff);
4238 if (map_addr == MAP_FAILED) {
4240 "mmap failed when mapped physical address 0x%08x to virtual address 0x%08x\n",
4241 pte & ~0xfff, virt_addr);
4244 page_set_flags(virt_addr, virt_addr + page_size,
4245 PAGE_VALID | PAGE_EXEC | prot);
4247 printf("mmaping 0x%08x to virt 0x%08x pse=%d\n",
4248 pte & ~0xfff, virt_addr, (page_size != 4096));
/* fault paths: build the x86 page-fault error code (P/W/U bits) */
4252 error_code = PG_ERROR_P_MASK;
4255 env->error_code = (is_write << PG_ERROR_W_BIT) | error_code;
4257 env->error_code |= PG_ERROR_U_MASK;
4261 /***********************************************************/
/* human-readable names for the lazy condition-code states (CC_OP_*), used by
   cpu_x86_dump_state(); initializer entries are elided in this extract */
4264 static const char *cc_op_str[] = {
/* Dump CPU state (general registers, eflags condition letters, segment
   selectors; optionally the lazy cc state and FPU stack) to stream f.
   flags: X86_DUMP_CCOP -> print cc_src/cc_dst/cc_op; X86_DUMP_FPU -> print
   ST0..ST7. Several original lines (declarations, else branch, closing
   braces) are elided in this extract; only the fpregs indices are changed. */
4297 void cpu_x86_dump_state(CPUX86State *env, FILE *f, int flags)
4300 char cc_op_name[32];
4302 eflags = env->eflags;
4303 fprintf(f, "EAX=%08x EBX=%08x ECX=%08x EDX=%08x\n"
4304 "ESI=%08x EDI=%08x EBP=%08x ESP=%08x\n"
4305 "EIP=%08x EFL=%08x [%c%c%c%c%c%c%c]\n",
4306 env->regs[R_EAX], env->regs[R_EBX], env->regs[R_ECX], env->regs[R_EDX],
4307 env->regs[R_ESI], env->regs[R_EDI], env->regs[R_EBP], env->regs[R_ESP],
4309 eflags & DF_MASK ? 'D' : '-',
4310 eflags & CC_O ? 'O' : '-',
4311 eflags & CC_S ? 'S' : '-',
4312 eflags & CC_Z ? 'Z' : '-',
4313 eflags & CC_A ? 'A' : '-',
4314 eflags & CC_P ? 'P' : '-',
4315 eflags & CC_C ? 'C' : '-');
4316 fprintf(f, "CS=%04x SS=%04x DS=%04x ES=%04x FS=%04x GS=%04x\n",
4317 env->segs[R_CS].selector,
4318 env->segs[R_SS].selector,
4319 env->segs[R_DS].selector,
4320 env->segs[R_ES].selector,
4321 env->segs[R_FS].selector,
4322 env->segs[R_GS].selector);
4323 if (flags & X86_DUMP_CCOP) {
/* known cc_op: symbolic name; otherwise print the raw numeric value */
4324 if ((unsigned)env->cc_op < CC_OP_NB)
4325 strcpy(cc_op_name, cc_op_str[env->cc_op]);
4327 snprintf(cc_op_name, sizeof(cc_op_name), "[%d]", env->cc_op);
4328 fprintf(f, "CCS=%08x CCD=%08x CCO=%-8s\n",
4329 env->cc_src, env->cc_dst, cc_op_name);
4331 if (flags & X86_DUMP_FPU) {
4332 fprintf(f, "ST0=%f ST1=%f ST2=%f ST3=%f\n",
4333 (double)env->fpregs[0],
4334 (double)env->fpregs[1],
4335 (double)env->fpregs[2],
4336 (double)env->fpregs[3]);
/* BUGFIX: ST6/ST7 previously printed fpregs[7] and fpregs[8]; fpregs has
   8 entries (see the 8-iteration FPU init in cpu_x86_init), so index 8 was
   an out-of-bounds read and ST6 showed the wrong register */
4337 fprintf(f, "ST4=%f ST5=%f ST6=%f ST7=%f\n",
4338 (double)env->fpregs[4],
4339 (double)env->fpregs[5],
4340 (double)env->fpregs[6],
4341 (double)env->fpregs[7]);