4 * Copyright (c) 2005-2007 CodeSourcery
5 * Written by Paul Brook
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free Software
19 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 #include "m68k-qreg.h"
33 //#define DEBUG_DISPATCH 1
/* Poor-man's assert: reports MSG on stderr when COND is false.
   NOTE(review): decimated listing -- the `if (!cond)` guard and the
   abort() that presumably follows are elided here; verify against the
   full source. */
35 static inline void qemu_assert(int cond, const char *msg)
38 fprintf (stderr, "badness: %s\n", msg);
43 /* internal defines */
/* Per-translation-block disassembly state threaded through every
   disas_* handler.  NOTE(review): several members are elided in this
   decimated listing (pc, cc_op, is_jmp, env, ...). */
44 typedef struct DisasContext {
46 target_ulong insn_pc; /* Start of the current instruction. */
52 struct TranslationBlock *tb;
53 int singlestep_enabled;
/* is_jmp disposition value: force a jump to the next TB. */
57 #define DISAS_JUMP_NEXT 4
59 #if defined(CONFIG_USER_ONLY)
62 #define IS_USER(s) s->user
65 /* XXX: move that elsewhere */
66 /* ??? Fix exceptions. */
/* Marks the last qop generated that may raise an exception (see
   gen_load/gen_store). */
67 static void *gen_throws_exception;
68 #define gen_last_qop NULL
/* Output pointers for the generated micro-op stream. */
70 static uint16_t *gen_opc_ptr;
71 static uint32_t *gen_opparam_ptr;
76 #define DEF(s, n, copy_size) INDEX_op_ ## s,
/* Memory access helpers: user-only builds go straight to the _raw
   ops; system builds pick _user/_kernel based on IS_USER(s). */
84 #if defined(CONFIG_USER_ONLY)
85 #define gen_st(s, name, addr, val) gen_op_st##name##_raw(addr, val)
86 #define gen_ld(s, name, val, addr) gen_op_ld##name##_raw(val, addr)
88 #define gen_st(s, name, addr, val) do { \
90 gen_op_st##name##_user(addr, val); \
92 gen_op_st##name##_kernel(addr, val); \
94 #define gen_ld(s, name, val, addr) do { \
96 gen_op_ld##name##_user(val, addr); \
98 gen_op_ld##name##_kernel(val, addr); \
102 #include "op-hacks.h"
/* Map a 3-bit register field at bit POS of INSN to a qreg. */
110 #define DREG(insn, pos) (((insn >> pos) & 7) + QREG_D0)
111 #define AREG(insn, pos) (((insn >> pos) & 7) + QREG_A0)
112 #define FREG(insn, pos) (((insn >> pos) & 7) + QREG_F0)
114 typedef void (*disas_proc)(DisasContext *, uint16_t);
/* DISAS_INSN declares one instruction handler; the DEBUG_DISPATCH
   variant wraps it so each dispatch is logged. */
116 #ifdef DEBUG_DISPATCH
117 #define DISAS_INSN(name) \
118 static void real_disas_##name (DisasContext *s, uint16_t insn); \
119 static void disas_##name (DisasContext *s, uint16_t insn) { \
120 if (logfile) fprintf(logfile, "Dispatch " #name "\n"); \
121 real_disas_##name(s, insn); } \
122 static void real_disas_##name (DisasContext *s, uint16_t insn)
124 #define DISAS_INSN(name) \
125 static void disas_##name (DisasContext *s, uint16_t insn)
128 /* Generate a load from the specified address. Narrow values are
129 sign extended to full register width. */
/* Returns the qreg holding the loaded value.  NOTE(review): the
   switch(opsize) skeleton, case labels and the final return are
   elided in this decimated listing. */
130 static inline int gen_load(DisasContext * s, int opsize, int addr, int sign)
/* OS_BYTE: signed vs. unsigned byte load into a fresh I32 qreg. */
136 tmp = gen_new_qreg(QMODE_I32);
138 gen_ld(s, 8s32, tmp, addr);
140 gen_ld(s, 8u32, tmp, addr);
/* OS_WORD: signed vs. unsigned 16-bit load. */
143 tmp = gen_new_qreg(QMODE_I32);
145 gen_ld(s, 16s32, tmp, addr);
147 gen_ld(s, 16u32, tmp, addr);
/* OS_LONG: full 32-bit load, no extension needed. */
150 tmp = gen_new_qreg(QMODE_I32);
151 gen_ld(s, 32, tmp, addr);
/* OS_SINGLE / OS_DOUBLE: FP loads into F32/F64 qregs. */
154 tmp = gen_new_qreg(QMODE_F32);
155 gen_ld(s, f32, tmp, addr);
158 tmp = gen_new_qreg(QMODE_F64);
159 gen_ld(s, f64, tmp, addr);
162 qemu_assert(0, "bad load size");
/* Record that the op just emitted may fault. */
164 gen_throws_exception = gen_last_qop;
168 /* Generate a store. */
/* Store VAL (a qreg) of size OPSIZE at ADDR.  NOTE(review): the
   switch(opsize)/case skeleton is elided in this decimated listing. */
169 static inline void gen_store(DisasContext *s, int opsize, int addr, int val)
174 gen_st(s, 8, addr, val);
177 gen_st(s, 16, addr, val);
180 gen_st(s, 32, addr, val);
183 gen_st(s, f32, addr, val);
186 gen_st(s, f64, addr, val);
189 qemu_assert(0, "bad store size");
/* Record that the op just emitted may fault. */
191 gen_throws_exception = gen_last_qop;
194 /* Generate an unsigned load if VAL is 0 a signed load if val is -1,
195 otherwise generate a store. */
/* Returns the loaded qreg for reads; for writes the return value is
   not meaningful to callers. */
196 static int gen_ldst(DisasContext *s, int opsize, int addr, int val)
199 gen_store(s, opsize, addr, val);
/* val != 0 here means val == -1, i.e. sign-extend the load. */
202 return gen_load(s, opsize, addr, val != 0);
206 /* Read a 32-bit immediate constant. */
/* Fetches two big-endian code words at s->pc (pc advances are elided
   in this decimated listing). */
207 static inline uint32_t read_im32(DisasContext *s)
210 im = ((uint32_t)lduw_code(s->pc)) << 16;
212 im |= lduw_code(s->pc);
217 /* Calculate an address index. */
/* Decode the index-register part of an extension word EXT: pick
   An/Dn from bits 12-14 (bit 15 selects address regs), sign-extend a
   word-sized index into TMP, and apply the 2-bit scale factor.
   NOTE(review): the returns are elided in this decimated listing. */
218 static int gen_addr_index(uint16_t ext, int tmp)
223 add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
224 if ((ext & 0x800) == 0) {
225 gen_op_ext16s32(tmp, add);
228 scale = (ext >> 9) & 3;
/* Scale the index by 1/2/4/8 (shift by the 2-bit scale field). */
230 gen_op_shl32(tmp, add, gen_im32(scale));
236 /* Handle a base + index + displacement effective address. A base of
237 -1 means pc-relative. */
/* Compute the effective address for an indexed mode into a qreg and
   return it.  BASE is the base-register qreg, or -1 for pc-relative.
   NOTE(review): heavily decimated -- returns, else-branches and brace
   structure are elided; comments below are per-visible-line only. */
238 static int gen_lea_indexed(DisasContext *s, int opsize, int base)
247 ext = lduw_code(s->pc);
/* Word-sized index requires the WORD_INDEX feature. */
250 if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
254 /* full extension word format */
255 if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
/* Bits 4-5: base displacement size (0x20 = word, else long). */
258 if ((ext & 0x30) > 0x10) {
259 /* base displacement */
260 if ((ext & 0x30) == 0x20) {
261 bd = (int16_t)lduw_code(s->pc);
269 tmp = gen_new_qreg(QMODE_I32);
/* Neither base nor index suppressed (bits 2 and 6 clear). */
270 if ((ext & 0x44) == 0) {
272 add = gen_addr_index(ext, tmp);
276 if ((ext & 0x80) == 0) {
277 /* base not suppressed */
/* pc-relative: fold the pc offset into the displacement. */
279 base = gen_im32(offset + bd);
283 gen_op_add32(tmp, add, base);
291 gen_op_add32(tmp, add, gen_im32(bd));
297 if ((ext & 3) != 0) {
298 /* memory indirect */
299 base = gen_load(s, OS_LONG, add, 0);
/* Post-indexed: apply the index after the indirection. */
300 if ((ext & 0x44) == 4) {
301 add = gen_addr_index(ext, tmp);
302 gen_op_add32(tmp, add, base);
308 /* outer displacement */
309 if ((ext & 3) == 2) {
310 od = (int16_t)lduw_code(s->pc);
319 gen_op_add32(tmp, add, gen_im32(od));
324 /* brief extension word format */
325 tmp = gen_new_qreg(QMODE_I32);
326 add = gen_addr_index(ext, tmp);
/* Register base: base + index + 8-bit displacement. */
328 gen_op_add32(tmp, add, base);
330 gen_op_add32(tmp, tmp, gen_im32((int8_t)ext));
/* pc-relative base: fold pc offset in directly. */
332 gen_op_add32(tmp, add, gen_im32(offset + (int8_t)ext));
339 /* Update the CPU env CC_OP state. */
/* Writes the statically-known cc_op into QREG_CC_OP; skipped when the
   value is already tracked dynamically. */
340 static inline void gen_flush_cc_op(DisasContext *s)
342 if (s->cc_op != CC_OP_DYNAMIC)
343 gen_op_mov32(QREG_CC_OP, gen_im32(s->cc_op));
346 /* Evaluate all the CC flags. */
347 static inline void gen_flush_flags(DisasContext *s)
349 if (s->cc_op == CC_OP_FLAGS)
352 gen_op_flush_flags();
353 s->cc_op = CC_OP_FLAGS;
/* Byte width of an operand size code; asserts on anything else. */
356 static inline int opsize_bytes(int opsize)
359 case OS_BYTE: return 1;
360 case OS_WORD: return 2;
361 case OS_LONG: return 4;
362 case OS_SINGLE: return 4;
363 case OS_DOUBLE: return 8;
365 qemu_assert(0, "bad operand size");
369 /* Assign value to a register. If the width is less than the register width
370 only the low part of the register is set. */
/* NOTE(review): switch(opsize)/case/break lines are elided in this
   decimated listing. */
371 static void gen_partset_reg(int opsize, int reg, int val)
/* OS_BYTE: keep high 24 bits, merge in low 8 of VAL. */
376 gen_op_and32(reg, reg, gen_im32(0xffffff00));
377 tmp = gen_new_qreg(QMODE_I32);
378 gen_op_and32(tmp, val, gen_im32(0xff));
379 gen_op_or32(reg, reg, tmp);
/* OS_WORD: keep high 16 bits, merge in low 16 of VAL. */
382 gen_op_and32(reg, reg, gen_im32(0xffff0000));
383 tmp = gen_new_qreg(QMODE_I32);
384 gen_op_and32(tmp, val, gen_im32(0xffff));
385 gen_op_or32(reg, reg, tmp);
/* OS_LONG: plain move. */
388 gen_op_mov32(reg, val);
/* OS_SINGLE: repack an F32 value into the integer register. */
391 gen_op_pack_32_f32(reg, val);
394 qemu_assert(0, "Bad operand size");
399 /* Sign or zero extend a value. */
/* Returns a fresh qreg holding VAL widened per OPSIZE; SIGN selects
   signed vs. unsigned extension.  NOTE(review): switch skeleton and
   returns are elided in this decimated listing. */
400 static inline int gen_extend(int val, int opsize, int sign)
406 tmp = gen_new_qreg(QMODE_I32);
408 gen_op_ext8s32(tmp, val);
410 gen_op_ext8u32(tmp, val);
413 tmp = gen_new_qreg(QMODE_I32);
415 gen_op_ext16s32(tmp, val);
417 gen_op_ext16u32(tmp, val);
/* OS_SINGLE: reinterpret the 32-bit pattern as F32. */
423 tmp = gen_new_qreg(QMODE_F32);
424 gen_op_pack_f32_32(tmp, val);
427 qemu_assert(0, "Bad operand size");
432 /* Generate code for an "effective address". Does not adjust the base
433 register for autoincrement addressing modes. */
/* Emit code computing the effective address for INSN's EA field and
   return the qreg (or immediate qreg) holding it.  Does NOT write
   back autoincrement/decrement.  NOTE(review): decimated -- returns,
   breaks and the mode-7 sub-switch header are elided. */
434 static int gen_lea(DisasContext *s, uint16_t insn, int opsize)
442 switch ((insn >> 3) & 7) {
443 case 0: /* Data register direct. */
444 case 1: /* Address register direct. */
446 case 2: /* Indirect register */
447 case 3: /* Indirect postincrement. */
450 case 4: /* Indirect predecrement. */
/* Address is An minus the operand size; An itself updated later. */
452 tmp = gen_new_qreg(QMODE_I32);
453 gen_op_sub32(tmp, reg, gen_im32(opsize_bytes(opsize)));
455 case 5: /* Indirect displacement. */
457 tmp = gen_new_qreg(QMODE_I32);
458 ext = lduw_code(s->pc);
460 gen_op_add32(tmp, reg, gen_im32((int16_t)ext));
462 case 6: /* Indirect index + displacement. */
464 return gen_lea_indexed(s, opsize, reg);
/* Mode 7: the register field selects the sub-mode. */
467 case 0: /* Absolute short. */
468 offset = ldsw_code(s->pc);
470 return gen_im32(offset);
471 case 1: /* Absolute long. */
472 offset = read_im32(s);
473 return gen_im32(offset);
474 case 2: /* pc displacement */
475 tmp = gen_new_qreg(QMODE_I32);
477 offset += ldsw_code(s->pc);
479 return gen_im32(offset);
480 case 3: /* pc index+displacement. */
481 return gen_lea_indexed(s, opsize, -1);
482 case 4: /* Immediate. */
487 /* Should never happen. */
491 /* Helper function for gen_ea. Reuse the computed address between the
492 for read/write operands. */
/* On the write pass (ADDRP set, VAL > 0) the address saved by the
   earlier read pass is reused instead of recomputing the EA. */
493 static inline int gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
498 if (addrp && val > 0) {
501 tmp = gen_lea(s, insn, opsize);
507 return gen_ldst(s, opsize, tmp, val);
510 /* Generate code to load/store a value into/from an EA. If VAL > 0 this is
511 a write, otherwise it is a read (-1 == sign extend, 0 == zero extend;
512 see gen_ldst). ADDRP is non-null for readwrite operands. */
/* Load (VAL == 0 zero-extend / VAL == -1 sign-extend) or store
   (VAL > 0) through INSN's EA field; *ADDRP caches the address for
   read-modify-write operands.  Returns the loaded qreg, or -1 for an
   invalid EA.  NOTE(review): decimated -- breaks, returns and several
   case headers are elided. */
513 static int gen_ea(DisasContext *s, uint16_t insn, int opsize, int val,
521 switch ((insn >> 3) & 7) {
522 case 0: /* Data register direct. */
525 gen_partset_reg(opsize, reg, val);
/* Read: VAL doubles as the sign flag for gen_extend. */
528 return gen_extend(reg, opsize, val);
530 case 1: /* Address register direct. */
533 gen_op_mov32(reg, val);
536 return gen_extend(reg, opsize, val);
538 case 2: /* Indirect register */
540 return gen_ldst(s, opsize, reg, val);
541 case 3: /* Indirect postincrement. */
543 result = gen_ldst(s, opsize, reg, val);
544 /* ??? This is not exception safe. The instruction may still
545 fault after this point. */
/* Only bump An on the store pass or for plain reads. */
546 if (val > 0 || !addrp)
547 gen_op_add32(reg, reg, gen_im32(opsize_bytes(opsize)));
549 case 4: /* Indirect predecrement. */
/* Write pass of a RMW operand: reuse the cached address. */
552 if (addrp && val > 0) {
555 tmp = gen_lea(s, insn, opsize);
561 result = gen_ldst(s, opsize, tmp, val);
562 /* ??? This is not exception safe. The instruction may still
563 fault after this point. */
/* Commit the decremented address back into An. */
564 if (val > 0 || !addrp) {
566 gen_op_mov32(reg, tmp);
570 case 5: /* Indirect displacement. */
571 case 6: /* Indirect index + displacement. */
572 return gen_ea_once(s, insn, opsize, val, addrp);
575 case 0: /* Absolute short. */
576 case 1: /* Absolute long. */
577 case 2: /* pc displacement */
578 case 3: /* pc index+displacement. */
579 return gen_ea_once(s, insn, opsize, val, addrp);
580 case 4: /* Immediate. */
581 /* Sign extend values for consistency. */
585 offset = ldsb_code(s->pc + 1);
587 offset = ldub_code(s->pc + 1);
592 offset = ldsw_code(s->pc);
594 offset = lduw_code(s->pc);
598 offset = read_im32(s);
601 qemu_assert(0, "Bad immediate operand");
603 return gen_im32(offset);
608 /* Should never happen. */
/* Set the condition codes from the result of a logical operation
   (N and Z from VAL; C and V cleared by the op). */
612 static void gen_logic_cc(DisasContext *s, int val)
614 gen_op_logic_cc(val);
615 s->cc_op = CC_OP_LOGIC;
/* Emit a conditional jump to label L1 taken when m68k condition COND
   (0-15) holds.  Flags are assumed flushed into QREG_CC_DEST.
   NOTE(review): decimated -- the switch header, breaks, and cases
   0 (T) / 1 (F) are elided. */
618 static void gen_jmpcc(DisasContext *s, int cond, int l1)
629 case 2: /* HI (!C && !Z) */
630 tmp = gen_new_qreg(QMODE_I32);
631 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
632 gen_op_jmp_z32(tmp, l1);
634 case 3: /* LS (C || Z) */
635 tmp = gen_new_qreg(QMODE_I32);
636 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
637 gen_op_jmp_nz32(tmp, l1);
639 case 4: /* CC (!C) */
640 tmp = gen_new_qreg(QMODE_I32);
641 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
642 gen_op_jmp_z32(tmp, l1);
/* case 5: CS (C) */
645 tmp = gen_new_qreg(QMODE_I32);
646 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
647 gen_op_jmp_nz32(tmp, l1);
649 case 6: /* NE (!Z) */
650 tmp = gen_new_qreg(QMODE_I32);
651 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
652 gen_op_jmp_z32(tmp, l1);
/* case 7: EQ (Z) */
655 tmp = gen_new_qreg(QMODE_I32);
656 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
657 gen_op_jmp_nz32(tmp, l1);
659 case 8: /* VC (!V) */
660 tmp = gen_new_qreg(QMODE_I32);
661 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
662 gen_op_jmp_z32(tmp, l1);
/* case 9: VS (V) */
665 tmp = gen_new_qreg(QMODE_I32);
666 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
667 gen_op_jmp_nz32(tmp, l1);
669 case 10: /* PL (!N) */
670 tmp = gen_new_qreg(QMODE_I32);
671 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
672 gen_op_jmp_z32(tmp, l1);
674 case 11: /* MI (N) */
675 tmp = gen_new_qreg(QMODE_I32);
676 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
677 gen_op_jmp_nz32(tmp, l1);
679 case 12: /* GE (!(N ^ V)) */
/* Align N (bit 3) with V (bit 1) by shifting right 2, then XOR. */
680 tmp = gen_new_qreg(QMODE_I32);
681 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
682 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
683 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
684 gen_op_jmp_z32(tmp, l1);
686 case 13: /* LT (N ^ V) */
687 tmp = gen_new_qreg(QMODE_I32);
688 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
689 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
690 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
691 gen_op_jmp_nz32(tmp, l1);
693 case 14: /* GT (!(Z || (N ^ V))) */
/* Fall through to L2 (not taken) if Z set or N^V set. */
696 l2 = gen_new_label();
697 tmp = gen_new_qreg(QMODE_I32);
698 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
699 gen_op_jmp_nz32(tmp, l2);
700 tmp = gen_new_qreg(QMODE_I32);
701 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
702 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
703 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
704 gen_op_jmp_nz32(tmp, l2);
709 case 15: /* LE (Z || (N ^ V)) */
710 tmp = gen_new_qreg(QMODE_I32);
711 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
712 gen_op_jmp_nz32(tmp, l1);
713 tmp = gen_new_qreg(QMODE_I32);
714 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
715 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
716 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
717 gen_op_jmp_nz32(tmp, l1);
720 /* Should never happen. */
/* NOTE(review): fragment of what looks like the Scc handler (set
   byte on condition) -- function header, register setup and the label
   emission are elided in this decimated listing. */
731 l1 = gen_new_label();
732 cond = (insn >> 8) & 0xf;
/* Clear the low byte, then set it to 0xff unless the inverted
   condition jumps past the set. */
734 gen_op_and32(reg, reg, gen_im32(0xffffff00));
735 gen_jmpcc(s, cond ^ 1, l1);
736 gen_op_or32(reg, reg, gen_im32(0xff));
740 /* Force a TB lookup after an instruction that changes the CPU state. */
/* Stores the next pc and flags the translation loop to stop with a
   full TB lookup rather than direct chaining. */
741 static void gen_lookup_tb(DisasContext *s)
744 gen_op_mov32(QREG_PC, gen_im32(s->pc));
745 s->is_jmp = DISAS_UPDATE;
748 /* Generate a jump to the address in qreg DEST. */
/* Computed jump: set PC from the DEST qreg and end the TB. */
749 static void gen_jmp(DisasContext *s, int dest)
752 gen_op_mov32(QREG_PC, dest);
753 s->is_jmp = DISAS_JUMP;
/* Raise exception NR with the faulting pc set to WHERE. */
756 static void gen_exception(DisasContext *s, uint32_t where, int nr)
759 gen_jmp(s, gen_im32(where));
760 gen_op_raise_exception(nr);
/* Raise an address error at the start of the current instruction. */
763 static inline void gen_addr_fault(DisasContext *s)
765 gen_exception(s, s->insn_pc, EXCP_ADDRESS);
/* Read an operand via gen_ea into RESULT; a -1 result means the EA
   was invalid (the elided body raises an address fault). */
768 #define SRC_EA(result, opsize, val, addrp) do { \
769 result = gen_ea(s, insn, opsize, val, addrp); \
770 if (result == -1) { \
/* Write VAL via gen_ea; same -1 / address-fault convention. */
776 #define DEST_EA(insn, opsize, val, addrp) do { \
777 int ea_result = gen_ea(s, insn, opsize, val, addrp); \
778 if (ea_result == -1) { \
784 /* Generate a jump to an immediate address. */
/* Uses direct TB chaining (goto_tb slot N) when DEST is on the same
   page as this TB or the current pc; otherwise falls back to an
   indirect jump.  Singlestep forces a debug exception instead. */
785 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
787 TranslationBlock *tb;
790 if (__builtin_expect (s->singlestep_enabled, 0)) {
791 gen_exception(s, dest, EXCP_DEBUG);
792 } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
793 (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
794 gen_op_goto_tb(0, n, (long)tb);
795 gen_op_mov32(QREG_PC, gen_im32(dest));
/* T0 = tb + n identifies which chaining slot was taken. */
796 gen_op_mov32(QREG_T0, gen_im32((long)tb + n));
799 gen_jmp(s, gen_im32(dest));
800 gen_op_mov32(QREG_T0, gen_im32(0));
803 s->is_jmp = DISAS_TB_JUMP;
/* Unimplemented MAC instructions trap as line-A. */
806 DISAS_INSN(undef_mac)
808 gen_exception(s, s->pc - 2, EXCP_LINEA);
/* Unimplemented FPU instructions trap as line-F. */
811 DISAS_INSN(undef_fpu)
813 gen_exception(s, s->pc - 2, EXCP_LINEF);
/* Generic undefined opcode (handler header elided in this listing). */
818 gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
819 cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
/* NOTE(review): decimated fragments of several DISAS_INSN handlers;
   headers, breaks and brace structure are elided throughout. */
/* mulw fragment: 16x16->32 multiply, signed/unsigned per bit 8. */
830 sign = (insn & 0x100) != 0;
832 tmp = gen_new_qreg(QMODE_I32);
834 gen_op_ext16s32(tmp, reg);
836 gen_op_ext16u32(tmp, reg);
837 SRC_EA(src, OS_WORD, sign ? -1 : 0, NULL);
838 gen_op_mul32(tmp, tmp, src);
839 gen_op_mov32(reg, tmp);
840 /* Unlike m68k, coldfire always clears the overflow bit. */
841 gen_logic_cc(s, tmp);
/* divw fragment: 32/16 divide via the DIV1/DIV2 helper registers;
   result is remainder:quotient packed into one register. */
851 sign = (insn & 0x100) != 0;
854 gen_op_ext16s32(QREG_DIV1, reg);
856 gen_op_ext16u32(QREG_DIV1, reg);
858 SRC_EA(src, OS_WORD, sign ? -1 : 0, NULL);
859 gen_op_mov32(QREG_DIV2, src);
866 tmp = gen_new_qreg(QMODE_I32);
867 src = gen_new_qreg(QMODE_I32);
868 gen_op_ext16u32(tmp, QREG_DIV1);
869 gen_op_shl32(src, QREG_DIV2, gen_im32(16));
870 gen_op_or32(reg, tmp, src);
872 s->cc_op = CC_OP_FLAGS;
/* divl fragment: 32-bit divide; unsupported forms raise an
   exception. */
882 ext = lduw_code(s->pc);
885 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
890 gen_op_mov32(QREG_DIV1, num);
891 SRC_EA(den, OS_LONG, 0, NULL);
892 gen_op_mov32(QREG_DIV2, den);
/* DIV1 = quotient, DIV2 = remainder after the (elided) div op. */
900 gen_op_mov32 (reg, QREG_DIV1);
903 gen_op_mov32 (reg, QREG_DIV2);
906 s->cc_op = CC_OP_FLAGS;
/* addsub fragment: add/sub selected by bit 14, with X-flag update. */
918 add = (insn & 0x4000) != 0;
920 dest = gen_new_qreg(QMODE_I32);
922 SRC_EA(tmp, OS_LONG, 0, &addr);
926 SRC_EA(src, OS_LONG, 0, NULL);
929 gen_op_add32(dest, tmp, src);
930 gen_op_update_xflag_lt(dest, src);
931 s->cc_op = CC_OP_ADD;
933 gen_op_update_xflag_lt(tmp, src);
934 gen_op_sub32(dest, tmp, src);
935 s->cc_op = CC_OP_SUB;
937 gen_op_update_cc_add(dest, src);
939 DEST_EA(insn, OS_LONG, dest, &addr);
941 gen_op_mov32(reg, dest);
946 /* Reverse the order of the bits in REG. */
/* bitrev: classic shift/mask bit-reversal, finished by a byte swap. */
954 val = gen_new_qreg(QMODE_I32);
955 tmp1 = gen_new_qreg(QMODE_I32);
956 tmp2 = gen_new_qreg(QMODE_I32);
958 gen_op_mov32(val, reg);
959 /* Reverse bits within each nibble. */
960 gen_op_shl32(tmp1, val, gen_im32(3));
961 gen_op_and32(tmp1, tmp1, gen_im32(0x88888888));
962 gen_op_shl32(tmp2, val, gen_im32(1));
963 gen_op_and32(tmp2, tmp2, gen_im32(0x44444444));
964 gen_op_or32(tmp1, tmp1, tmp2);
965 gen_op_shr32(tmp2, val, gen_im32(1));
966 gen_op_and32(tmp2, tmp2, gen_im32(0x22222222));
967 gen_op_or32(tmp1, tmp1, tmp2);
968 gen_op_shr32(tmp2, val, gen_im32(3));
969 gen_op_and32(tmp2, tmp2, gen_im32(0x11111111));
970 gen_op_or32(tmp1, tmp1, tmp2);
971 /* Reverse nibbles within bytes. */
972 gen_op_shl32(val, tmp1, gen_im32(4));
973 gen_op_and32(val, val, gen_im32(0xf0f0f0f0));
974 gen_op_shr32(tmp2, tmp1, gen_im32(4));
975 gen_op_and32(tmp2, tmp2, gen_im32(0x0f0f0f0f));
976 gen_op_or32(val, val, tmp2);
978 gen_op_bswap32(reg, val);
979 gen_op_mov32(reg, val);
/* bitop_reg: BTST/BCHG/BCLR/BSET with the bit number in a data
   register; bit index is mod 8 for memory (byte) operands, mod 32
   for registers. */
982 DISAS_INSN(bitop_reg)
992 if ((insn & 0x38) != 0)
996 op = (insn >> 6) & 3;
997 SRC_EA(src1, opsize, 0, op ? &addr: NULL);
998 src2 = DREG(insn, 9);
999 dest = gen_new_qreg(QMODE_I32);
1002 tmp = gen_new_qreg(QMODE_I32);
1003 if (opsize == OS_BYTE)
1004 gen_op_and32(tmp, src2, gen_im32(7));
1006 gen_op_and32(tmp, src2, gen_im32(31));
1008 tmp = gen_new_qreg(QMODE_I32);
1009 gen_op_shl32(tmp, gen_im32(1), src2);
/* Z flag reflects the tested bit. */
1011 gen_op_btest(src1, tmp);
1014 gen_op_xor32(dest, src1, tmp);
1017 gen_op_not32(tmp, tmp);
1018 gen_op_and32(dest, src1, tmp);
1021 gen_op_or32(dest, src1, tmp);
1027 DEST_EA(insn, opsize, dest, &addr);
/* sats fragment: saturate Dn to 0x7fffffff/0x80000000 when V set. */
1036 reg = DREG(insn, 0);
1037 tmp = gen_new_qreg(QMODE_I32);
1039 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
1040 l1 = gen_new_label();
1041 gen_op_jmp_z32(tmp, l1);
1042 tmp = gen_new_qreg(QMODE_I32);
1043 gen_op_shr32(tmp, reg, gen_im32(31));
1044 gen_op_xor32(tmp, tmp, gen_im32(0x80000000));
1045 gen_op_mov32(reg, tmp);
1047 gen_logic_cc(s, tmp);
/* Push VAL (a qreg) onto the stack: predecrement SP by 4, store. */
1050 static void gen_push(DisasContext *s, int val)
1054 tmp = gen_new_qreg(QMODE_I32);
1055 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
1056 gen_store(s, OS_LONG, tmp, val);
1057 gen_op_mov32(QREG_SP, tmp);
/* NOTE(review): decimated fragments of several DISAS_INSN handlers;
   headers, breaks and brace structure are elided throughout. */
/* movem fragment: register-list mask word, then load/store a long
   per set mask bit, advancing the address by 4 each time. */
1069 mask = lduw_code(s->pc)
1071 tmp = gen_lea(s, insn, OS_LONG);
1076 addr = gen_new_qreg(QMODE_I32);
1077 gen_op_mov32(addr, tmp);
1078 is_load = ((insn & 0x0400) != 0);
1079 for (i = 0; i < 16; i++, mask >>= 1) {
1086 tmp = gen_load(s, OS_LONG, addr, 0);
1087 gen_op_mov32(reg, tmp);
1089 gen_store(s, OS_LONG, addr, reg);
1092 gen_op_add32(addr, addr, gen_im32(4));
/* bitop_im: BTST/BCHG/BCLR/BSET with an immediate bit number. */
1097 DISAS_INSN(bitop_im)
1108 if ((insn & 0x38) != 0)
1112 op = (insn >> 6) & 3;
1114 bitnum = lduw_code(s->pc);
/* High byte of the bit-number word must be zero. */
1116 if (bitnum & 0xff00) {
1117 disas_undef(s, insn);
1121 SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1124 tmp = gen_new_qreg(QMODE_I32);
1125 if (opsize == OS_BYTE)
1131 gen_op_btest(src1, gen_im32(mask));
1133 dest = gen_new_qreg(QMODE_I32);
1139 gen_op_xor32(dest, src1, gen_im32(mask));
1142 gen_op_and32(dest, src1, gen_im32(~mask));
1145 gen_op_or32(dest, src1, gen_im32(mask));
1151 DEST_EA(insn, opsize, dest, &addr);
/* arith_im: ORI/ANDI/SUBI/ADDI/EORI/CMPI with a 32-bit immediate;
   op field (bits 9-11) selects the operation; CMPI (op 6) does not
   write back. */
1154 DISAS_INSN(arith_im)
1162 op = (insn >> 9) & 7;
1163 SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1164 src2 = gen_im32(read_im32(s));
1165 dest = gen_new_qreg(QMODE_I32);
1168 gen_op_or32(dest, src1, src2);
1169 gen_logic_cc(s, dest);
1172 gen_op_and32(dest, src1, src2);
1173 gen_logic_cc(s, dest);
1176 gen_op_mov32(dest, src1);
1177 gen_op_update_xflag_lt(dest, src2);
1178 gen_op_sub32(dest, dest, src2);
1179 gen_op_update_cc_add(dest, src2);
1180 s->cc_op = CC_OP_SUB;
1183 gen_op_mov32(dest, src1);
1184 gen_op_add32(dest, dest, src2);
1185 gen_op_update_cc_add(dest, src2);
1186 gen_op_update_xflag_lt(dest, src2);
1187 s->cc_op = CC_OP_ADD;
1190 gen_op_xor32(dest, src1, src2);
1191 gen_logic_cc(s, dest);
1194 gen_op_mov32(dest, src1);
1195 gen_op_sub32(dest, dest, src2);
1196 gen_op_update_cc_add(dest, src2);
1197 s->cc_op = CC_OP_SUB;
1203 DEST_EA(insn, OS_LONG, dest, &addr);
/* byterev fragment: swap byte order of Dn. */
1211 reg = DREG(insn, 0);
1212 gen_op_bswap32(reg, reg);
/* move fragment: size from opcode bits 12-15; moves to An skip the
   condition-code update. */
1222 switch (insn >> 12) {
1223 case 1: /* move.b */
1226 case 2: /* move.l */
1229 case 3: /* move.w */
1235 SRC_EA(src, opsize, -1, NULL);
1236 op = (insn >> 6) & 7;
1239 /* The value will already have been sign extended. */
1240 dest = AREG(insn, 9);
1241 gen_op_mov32(dest, src);
/* Destination EA is rebuilt from the swapped reg/mode fields. */
1245 dest_ea = ((insn >> 9) & 7) | (op << 3);
1246 DEST_EA(dest_ea, opsize, src, NULL);
1247 /* This will be correct because loads sign extend. */
1248 gen_logic_cc(s, src);
/* negx fragment: 0 - Dn - X, keeping Z sticky across the chain. */
1259 reg = DREG(insn, 0);
1260 dest = gen_new_qreg(QMODE_I32);
1261 gen_op_mov32 (dest, gen_im32(0));
1262 gen_op_subx_cc(dest, reg);
1264 tmp = gen_new_qreg(QMODE_I32);
1265 gen_op_mov32 (tmp, QREG_CC_DEST);
1266 gen_op_update_cc_add(dest, reg);
1267 gen_op_mov32(reg, dest);
1268 s->cc_op = CC_OP_DYNAMIC;
/* Z is only kept set if it was set before (sticky Z). */
1270 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1271 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1272 s->cc_op = CC_OP_FLAGS;
/* lea fragment: An = effective address. */
1280 reg = AREG(insn, 9);
1281 tmp = gen_lea(s, insn, OS_LONG);
1286 gen_op_mov32(reg, tmp);
/* clr fragment: store zero to the EA and set flags for zero. */
1293 switch ((insn >> 6) & 3) {
1306 DEST_EA(insn, opsize, gen_im32(0), NULL);
1307 gen_logic_cc(s, gen_im32(0));
/* Build the CCR value in a fresh qreg: X flag in bit 4, NZVC from
   QREG_CC_DEST in the low bits.  Returns the qreg. */
1310 static int gen_get_ccr(DisasContext *s)
1315 dest = gen_new_qreg(QMODE_I32);
1316 gen_op_get_xflag(dest);
1317 gen_op_shl32(dest, dest, gen_im32(4));
1318 gen_op_or32(dest, dest, QREG_CC_DEST);
/* move Dn <- CCR (word-sized partial register write). */
1322 DISAS_INSN(move_from_ccr)
1327 ccr = gen_get_ccr(s);
1328 reg = DREG(insn, 0);
1329 gen_partset_reg(OS_WORD, reg, ccr);
/* neg fragment: Dn = -Dn with ADD-style flags and X from the borrow.
   NOTE(review): handler header elided in this decimated listing. */
1337 reg = DREG(insn, 0);
1338 src1 = gen_new_qreg(QMODE_I32);
1339 gen_op_mov32(src1, reg);
1340 gen_op_neg32(reg, src1);
1341 s->cc_op = CC_OP_SUB;
1342 gen_op_update_cc_add(reg, src1);
1343 gen_op_update_xflag_lt(gen_im32(0), src1);
1344 s->cc_op = CC_OP_SUB;
/* Load an immediate into CCR (low 5 bits: NZVC + X), and into the SR
   system byte as well unless CCR_ONLY. */
1347 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1349 gen_op_logic_cc(gen_im32(val & 0xf));
1350 gen_op_update_xflag_tst(gen_im32((val & 0x10) >> 4));
1352 gen_op_set_sr(gen_im32(val & 0xff00));
/* Set SR/CCR from INSN's operand: a data register ((insn & 0x38) == 0)
   or an immediate ((insn & 0x3f) == 0x3c); anything else is
   undefined.  NOTE(review): brace structure elided in this listing. */
1356 static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1361 s->cc_op = CC_OP_FLAGS;
1362 if ((insn & 0x38) == 0)
1364 src1 = gen_new_qreg(QMODE_I32);
1365 reg = DREG(insn, 0);
/* NZVC from bits 0-3, X from bit 4 of the register. */
1366 gen_op_and32(src1, reg, gen_im32(0xf));
1367 gen_op_logic_cc(src1);
1368 gen_op_shr32(src1, reg, gen_im32(4));
1369 gen_op_and32(src1, src1, gen_im32(1));
1370 gen_op_update_xflag_tst(src1);
1375 else if ((insn & 0x3f) == 0x3c)
1378 val = lduw_code(s->pc);
1380 gen_set_sr_im(s, val, ccr_only);
1383 disas_undef(s, insn);
/* NOTE(review): decimated fragments of several DISAS_INSN handlers;
   headers, breaks and brace structure are elided throughout. */
/* move to CCR (ccr_only = 1). */
1386 DISAS_INSN(move_to_ccr)
1388 gen_set_sr(s, insn, 1);
/* not fragment: Dn = ~Dn with logic flags. */
1395 reg = DREG(insn, 0);
1396 gen_op_not32(reg, reg);
1397 gen_logic_cc(s, reg);
/* swap fragment: exchange the halves of Dn. */
1407 dest = gen_new_qreg(QMODE_I32);
1408 src1 = gen_new_qreg(QMODE_I32);
1409 src2 = gen_new_qreg(QMODE_I32);
1410 reg = DREG(insn, 0);
1411 gen_op_shl32(src1, reg, gen_im32(16));
1412 gen_op_shr32(src2, reg, gen_im32(16));
1413 gen_op_or32(dest, src1, src2);
1414 gen_op_mov32(reg, dest);
1415 gen_logic_cc(s, dest);
/* pea fragment: push the effective address (push itself elided). */
1422 tmp = gen_lea(s, insn, OS_LONG);
/* ext fragment: sign-extend byte->word or word->long per op field. */
1436 reg = DREG(insn, 0);
1437 op = (insn >> 6) & 7;
1438 tmp = gen_new_qreg(QMODE_I32);
1440 gen_op_ext16s32(tmp, reg);
1442 gen_op_ext8s32(tmp, reg);
1444 gen_partset_reg(OS_WORD, reg, tmp);
1446 gen_op_mov32(reg, tmp);
1447 gen_logic_cc(s, tmp);
/* tst fragment: sign-extending load, flags only. */
1455 switch ((insn >> 6) & 3) {
1468 SRC_EA(tmp, opsize, -1, NULL);
1469 gen_logic_cc(s, tmp);
1474 /* Implemented as a NOP. */
/* illegal fragment. */
1479 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1482 /* ??? This should be atomic. */
/* tas fragment: test byte, set bit 7, write back. */
1489 dest = gen_new_qreg(QMODE_I32);
1490 SRC_EA(src1, OS_BYTE, -1, &addr);
1491 gen_logic_cc(s, src1);
1492 gen_op_or32(dest, src1, gen_im32(0x80));
1493 DEST_EA(insn, OS_BYTE, dest, &addr);
/* mull fragment. */
1503 /* The upper 32 bits of the product are discarded, so
1504 muls.l and mulu.l are functionally equivalent. */
1505 ext = lduw_code(s->pc);
1508 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1511 reg = DREG(ext, 12);
1512 SRC_EA(src1, OS_LONG, 0, NULL);
1513 dest = gen_new_qreg(QMODE_I32);
1514 gen_op_mul32(dest, src1, reg);
1515 gen_op_mov32(reg, dest);
1516 /* Unlike m68k, coldfire always clears the overflow bit. */
1517 gen_logic_cc(s, dest);
/* link fragment: push An, An = SP, SP += offset. */
1526 offset = ldsw_code(s->pc);
1528 reg = AREG(insn, 0);
1529 tmp = gen_new_qreg(QMODE_I32);
1530 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
1531 gen_store(s, OS_LONG, tmp, reg);
1533 gen_op_mov32(reg, tmp);
1534 gen_op_add32(QREG_SP, tmp, gen_im32(offset));
/* unlk fragment: SP = An, An = pop(). */
1543 src = gen_new_qreg(QMODE_I32);
1544 reg = AREG(insn, 0);
1545 gen_op_mov32(src, reg);
1546 tmp = gen_load(s, OS_LONG, src, 0);
1547 gen_op_mov32(reg, tmp);
1548 gen_op_add32(QREG_SP, src, gen_im32(4));
/* rts fragment: pop return address (the jump itself is elided). */
1559 tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1560 gen_op_add32(QREG_SP, QREG_SP, gen_im32(4));
/* jmp/jsr fragment: bit 6 clear = jsr, which pushes the return pc. */
1568 /* Load the target address first to ensure correct exception
1570 tmp = gen_lea(s, insn, OS_LONG);
1575 if ((insn & 0x40) == 0) {
1577 gen_push(s, gen_im32(s->pc));
/* NOTE(review): decimated fragments of several DISAS_INSN handlers;
   headers, breaks and brace structure are elided throughout. */
/* addsubq fragment: quick add/sub of 1-8 (field value 0 means 8 in
   the elided lines); bit 8 selects subtract. */
1590 SRC_EA(src1, OS_LONG, 0, &addr);
1591 val = (insn >> 9) & 7;
1594 src2 = gen_im32(val);
1595 dest = gen_new_qreg(QMODE_I32);
1596 gen_op_mov32(dest, src1);
1597 if ((insn & 0x38) == 0x08) {
1598 /* Don't update condition codes if the destination is an
1599 address register. */
1600 if (insn & 0x0100) {
1601 gen_op_sub32(dest, dest, src2);
1603 gen_op_add32(dest, dest, src2);
1606 if (insn & 0x0100) {
1607 gen_op_update_xflag_lt(dest, src2);
1608 gen_op_sub32(dest, dest, src2);
1609 s->cc_op = CC_OP_SUB;
1611 gen_op_add32(dest, dest, src2);
1612 gen_op_update_xflag_lt(dest, src2);
1613 s->cc_op = CC_OP_ADD;
1615 gen_op_update_cc_add(dest, src2);
1617 DEST_EA(insn, OS_LONG, dest, &addr);
/* tpf fragment: trapf skips its extension words. */
1623 case 2: /* One extension word. */
1626 case 3: /* Two extension words. */
1629 case 4: /* No extension words. */
1632 disas_undef(s, insn);
/* branch fragment: Bcc/BRA/BSR; 8-bit displacement in the opcode,
   0 = word displacement, -1 = long displacement. */
1644 op = (insn >> 8) & 0xf;
1645 offset = (int8_t)insn;
1647 offset = ldsw_code(s->pc);
1649 } else if (offset == -1) {
1650 offset = read_im32(s);
/* BSR pushes the return address. */
1654 gen_push(s, gen_im32(s->pc));
/* Conditional: jump over the taken-branch TB goto when the inverted
   condition holds. */
1659 l1 = gen_new_label();
1660 gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1661 gen_jmp_tb(s, 1, base + offset);
1663 gen_jmp_tb(s, 0, s->pc);
1665 /* Unconditional branch. */
1666 gen_jmp_tb(s, 0, base + offset);
/* moveq fragment: sign-extended 8-bit immediate into Dn. */
1674 tmp = gen_im32((int8_t)insn);
1675 gen_op_mov32(DREG(insn, 9), tmp);
1676 gen_logic_cc(s, tmp);
/* mvzs fragment: move with zero/sign extension per bit 7. */
1689 SRC_EA(src, opsize, (insn & 0x80) ? 0 : -1, NULL);
1690 reg = DREG(insn, 9);
1691 gen_op_mov32(reg, src);
1692 gen_logic_cc(s, src);
/* or fragment: direction (EA dest vs. Dn dest) split on an elided
   test. */
1702 reg = DREG(insn, 9);
1703 dest = gen_new_qreg(QMODE_I32);
1705 SRC_EA(src, OS_LONG, 0, &addr);
1706 gen_op_or32(dest, src, reg);
1707 DEST_EA(insn, OS_LONG, dest, &addr);
1709 SRC_EA(src, OS_LONG, 0, NULL);
1710 gen_op_or32(dest, src, reg);
1711 gen_op_mov32(reg, dest);
1713 gen_logic_cc(s, dest);
/* suba fragment: An -= EA, no flag update. */
1721 SRC_EA(src, OS_LONG, 0, NULL);
1722 reg = AREG(insn, 9);
1723 gen_op_sub32(reg, reg, src);
/* subx fragment: Dn = Dn - Dm - X with sticky Z. */
1734 reg = DREG(insn, 9);
1735 src = DREG(insn, 0);
1736 dest = gen_new_qreg(QMODE_I32);
1737 gen_op_mov32 (dest, reg);
1738 gen_op_subx_cc(dest, src);
1740 tmp = gen_new_qreg(QMODE_I32);
1741 gen_op_mov32 (tmp, QREG_CC_DEST);
1742 gen_op_update_cc_add(dest, src);
1743 gen_op_mov32(reg, dest);
1744 s->cc_op = CC_OP_DYNAMIC;
/* Z only stays set if it was set before (sticky Z). */
1746 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1747 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1748 s->cc_op = CC_OP_FLAGS;
/* mov3q fragment: quick 3-bit immediate store (0 means -1 in the
   elided lines). */
1756 val = (insn >> 9) & 7;
1759 src = gen_im32(val);
1760 gen_logic_cc(s, src);
1761 DEST_EA(insn, OS_LONG, src, NULL);
/* NOTE(review): decimated fragments of several DISAS_INSN handlers;
   headers, breaks and brace structure are elided throughout. */
/* cmp fragment: size-specific compare cc_op, then Dn - EA, flags
   only (no write-back). */
1772 op = (insn >> 6) & 3;
1776 s->cc_op = CC_OP_CMPB;
1780 s->cc_op = CC_OP_CMPW;
1784 s->cc_op = CC_OP_SUB;
1789 SRC_EA(src, opsize, -1, NULL);
1790 reg = DREG(insn, 9);
1791 dest = gen_new_qreg(QMODE_I32);
1792 gen_op_sub32(dest, reg, src);
1793 gen_op_update_cc_add(dest, src);
/* cmpa fragment: An - EA, flags only. */
1808 SRC_EA(src, opsize, -1, NULL);
1809 reg = AREG(insn, 9);
1810 dest = gen_new_qreg(QMODE_I32);
1811 gen_op_sub32(dest, reg, src);
1812 gen_op_update_cc_add(dest, src);
1813 s->cc_op = CC_OP_SUB;
/* eor fragment: EA ^= Dn with logic flags. */
1823 SRC_EA(src, OS_LONG, 0, &addr);
1824 reg = DREG(insn, 9);
1825 dest = gen_new_qreg(QMODE_I32);
1826 gen_op_xor32(dest, src, reg);
1827 gen_logic_cc(s, dest);
1828 DEST_EA(insn, OS_LONG, dest, &addr);
/* and fragment: direction split (EA dest vs. Dn dest) on an elided
   test. */
1838 reg = DREG(insn, 9);
1839 dest = gen_new_qreg(QMODE_I32);
1841 SRC_EA(src, OS_LONG, 0, &addr);
1842 gen_op_and32(dest, src, reg);
1843 DEST_EA(insn, OS_LONG, dest, &addr);
1845 SRC_EA(src, OS_LONG, 0, NULL);
1846 gen_op_and32(dest, src, reg);
1847 gen_op_mov32(reg, dest);
1849 gen_logic_cc(s, dest);
/* adda fragment: An += EA, no flag update. */
1857 SRC_EA(src, OS_LONG, 0, NULL);
1858 reg = AREG(insn, 9);
1859 gen_op_add32(reg, reg, src);
/* addx fragment: Dn = Dn + Dm + X with sticky Z. */
1870 reg = DREG(insn, 9);
1871 src = DREG(insn, 0);
1872 dest = gen_new_qreg(QMODE_I32);
1873 gen_op_mov32 (dest, reg);
1874 gen_op_addx_cc(dest, src);
1876 tmp = gen_new_qreg(QMODE_I32);
1877 gen_op_mov32 (tmp, QREG_CC_DEST);
1878 gen_op_update_cc_add(dest, src);
1879 gen_op_mov32(reg, dest);
1880 s->cc_op = CC_OP_DYNAMIC;
/* Z only stays set if it was set before (sticky Z). */
1882 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1883 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1884 s->cc_op = CC_OP_FLAGS;
/* shift_im: shift Dn by an immediate count in bits 9-11 (0 means 8
   in the elided lines); direction/type from elided opcode tests. */
1887 DISAS_INSN(shift_im)
1892 reg = DREG(insn, 0);
1893 tmp = (insn >> 9) & 7;
1897 gen_op_shl_im_cc(reg, tmp);
1898 s->cc_op = CC_OP_SHL;
1901 gen_op_shr_im_cc(reg, tmp);
1902 s->cc_op = CC_OP_SHR;
1904 gen_op_sar_im_cc(reg, tmp);
1905 s->cc_op = CC_OP_SAR;
/* shift_reg: shift Dn by another register, count masked to 6 bits. */
1910 DISAS_INSN(shift_reg)
1916 reg = DREG(insn, 0);
1917 src = DREG(insn, 9);
1918 tmp = gen_new_qreg(QMODE_I32);
1919 gen_op_and32(tmp, src, gen_im32(63));
1921 gen_op_shl_cc(reg, tmp);
1922 s->cc_op = CC_OP_SHL;
1925 gen_op_shr_cc(reg, tmp);
1926 s->cc_op = CC_OP_SHR;
1928 gen_op_sar_cc(reg, tmp);
1929 s->cc_op = CC_OP_SAR;
/* ff1 fragment: find first one in Dn; flags from the input value. */
1937 reg = DREG(insn, 0);
1938 gen_logic_cc(s, reg);
1939 gen_op_ff1(reg, reg);
/* Build the full SR value in a fresh qreg: system byte from QREG_SR
   (masked to defined bits) OR'd with the CCR from gen_get_ccr. */
1942 static int gen_get_sr(DisasContext *s)
1947 ccr = gen_get_ccr(s);
1948 sr = gen_new_qreg(QMODE_I32);
1949 gen_op_and32(sr, QREG_SR, gen_im32(0xffe0));
1950 gen_op_or32(sr, sr, ccr);
/* NOTE(review): decimated fragments of the privileged/system
   instruction handlers; headers, breaks and brace structure are
   elided throughout. */
/* strldsr fragment: only the exact two-word 0x46FC form is handled;
   user mode or clearing the S bit traps. */
1960 ext = lduw_code(s->pc);
1962 if (ext != 0x46FC) {
1963 gen_exception(s, addr, EXCP_UNSUPPORTED);
1966 ext = lduw_code(s->pc);
1968 if (IS_USER(s) || (ext & SR_S) == 0) {
1969 gen_exception(s, addr, EXCP_PRIVILEGE);
1972 gen_push(s, gen_get_sr(s));
1973 gen_set_sr_im(s, ext, 0);
/* move from SR: privileged; word-sized partial write to Dn. */
1976 DISAS_INSN(move_from_sr)
1982 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1986 reg = DREG(insn, 0);
1987 gen_partset_reg(OS_WORD, reg, sr);
/* move to SR: privileged. */
1990 DISAS_INSN(move_to_sr)
1993 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1996 gen_set_sr(s, insn, 0);
/* move from/to USP: privileged, but USP itself is unimplemented. */
2000 DISAS_INSN(move_from_usp)
2003 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2006 /* TODO: Implement USP. */
2007 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
2010 DISAS_INSN(move_to_usp)
2013 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2016 /* TODO: Implement USP. */
2017 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
/* halt fragment: jump to self (the halt op itself is elided). */
2022 gen_jmp(s, gen_im32(s->pc));
/* stop fragment: privileged; load SR from the immediate, then halt. */
2031 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2035 ext = lduw_code(s->pc);
2038 gen_set_sr_im(s, ext, 0);
2039 gen_jmp(s, gen_im32(s->pc));
/* rte fragment: privileged; handled out of line via EXCP_RTE. */
2046 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2049 gen_exception(s, s->pc - 2, EXCP_RTE);
/* movec fragment: privileged; control register number in the low 12
   bits of the extension word, source reg in bits 12-15. */
2058 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2062 ext = lduw_code(s->pc);
2066 reg = AREG(ext, 12);
2068 reg = DREG(ext, 12);
2070 gen_op_movec(gen_im32(ext & 0xfff), reg);
/* intouch / cpushl: privileged cache ops, implemented as no-ops. */
2077 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2080 /* ICache fetch. Implement as no-op. */
2086 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2089 /* Cache push/invalidate. Implement as no-op. */
/* wddata / wdebug fragments: privileged; wdebug unimplemented. */
2094 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2100 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2103 /* TODO: Implement wdebug. */
2104 qemu_assert(0, "WDEBUG not implemented");
/* trap fragment: vector selected by the low 4 bits. */
2109 gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
2112 /* ??? FP exceptions are not implemented. Most exceptions are deferred until
2113 immediately before the next FP instruction is executed. */
/* DISAS_INSN(fpu) (header and many lines elided in this view).
   Decodes the coprocessor extension word: bits 15-13 select the
   operation class (fmove out, control-register moves, fmovem, or a
   dyadic/monadic FP op), bits 12-10 the source format, and the low 7
   bits the opmode. */
2124 ext = lduw_code(s->pc);
2126 opmode = ext & 0x7f;
2127 switch ((ext >> 13) & 7) {
2132 case 3: /* fmove out */
2135 /* ??? TODO: Proper behavior on overflow. */
2136 switch ((ext >> 10) & 7) {
2139 res = gen_new_qreg(QMODE_I32);
2140 gen_op_f64_to_i32(res, src);
2144 res = gen_new_qreg(QMODE_F32);
2145 gen_op_f64_to_f32(res, src);
2149 res = gen_new_qreg(QMODE_I32);
2150 gen_op_f64_to_i32(res, src);
2158 res = gen_new_qreg(QMODE_I32);
2159 gen_op_f64_to_i32(res, src);
2164 DEST_EA(insn, opsize, res, NULL);
2166 case 4: /* fmove to control register. */
2167 switch ((ext >> 10) & 7) {
2169 /* Not implemented. Ignore writes. */
2174 cpu_abort(NULL, "Unimplemented: fmove to control %d",
2178 case 5: /* fmove from control register. */
2179 switch ((ext >> 10) & 7) {
2181 /* Not implemented. Always return zero. */
2187 cpu_abort(NULL, "Unimplemented: fmove from control %d",
2191 DEST_EA(insn, OS_LONG, res, NULL);
2193 case 6: /* fmovem */
/* Only the "static list, no dynamic register list" form is accepted. */
2198 if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
2200 src = gen_lea(s, insn, OS_LONG);
2205 addr = gen_new_qreg(QMODE_I32);
2206 gen_op_mov32(addr, src);
/* Bit 13 selects store (register->memory) vs. load direction. */
2212 if (ext & (1 << 13)) {
2214 gen_st(s, f64, addr, dest);
2217 gen_ld(s, f64, dest, addr);
/* Advance by 8 bytes per double while more registers remain. */
2219 if (ext & (mask - 1))
2220 gen_op_add32(addr, addr, gen_im32(8));
2228 if (ext & (1 << 14)) {
2231 /* Source effective address. */
2232 switch ((ext >> 10) & 7) {
2233 case 0: opsize = OS_LONG; break;
2234 case 1: opsize = OS_SINGLE; break;
2235 case 4: opsize = OS_WORD; break;
2236 case 5: opsize = OS_DOUBLE; break;
2237 case 6: opsize = OS_BYTE; break;
2241 SRC_EA(tmp, opsize, -1, NULL);
2242 if (opsize == OS_DOUBLE) {
2245 src = gen_new_qreg(QMODE_F64);
/* All internal FP arithmetic is done in double precision. */
2250 gen_op_i32_to_f64(src, tmp);
2253 gen_op_f32_to_f64(src, tmp);
2258 /* Source register. */
2259 src = FREG(ext, 10);
2261 dest = FREG(ext, 7);
2262 res = gen_new_qreg(QMODE_F64);
2264 gen_op_movf64(res, dest);
2267 case 0: case 0x40: case 0x44: /* fmove */
2268 gen_op_movf64(res, src);
2271 gen_op_iround_f64(res, src);
2274 case 3: /* fintrz */
2275 gen_op_itrunc_f64(res, src);
2278 case 4: case 0x41: case 0x45: /* fsqrt */
2279 gen_op_sqrtf64(res, src);
2281 case 0x18: case 0x58: case 0x5c: /* fabs */
2282 gen_op_absf64(res, src);
2284 case 0x1a: case 0x5a: case 0x5e: /* fneg */
2285 gen_op_chsf64(res, src);
2287 case 0x20: case 0x60: case 0x64: /* fdiv */
2288 gen_op_divf64(res, res, src);
2290 case 0x22: case 0x62: case 0x66: /* fadd */
2291 gen_op_addf64(res, res, src);
2293 case 0x23: case 0x63: case 0x67: /* fmul */
2294 gen_op_mulf64(res, res, src);
2296 case 0x28: case 0x68: case 0x6c: /* fsub */
2297 gen_op_subf64(res, res, src);
2299 case 0x38: /* fcmp */
2300 gen_op_sub_cmpf64(res, res, src);
2304 case 0x3a: /* ftst */
2305 gen_op_movf64(res, src);
/* Rounding: opmode bit 6 requests explicit single/double rounding;
   otherwise the FPCR precision control decides.  Rounding to single is
   done via a f64->f32->f64 round trip. */
2313 if (opmode & 0x40) {
2314 if ((opmode & 0x4) != 0)
2316 } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
2323 tmp = gen_new_qreg(QMODE_F32);
2324 gen_op_f64_to_f32(tmp, res);
2325 gen_op_f32_to_f64(res, tmp);
2327 gen_op_fp_result(res);
2329 gen_op_movf64(dest, res);
2334 disas_undef_fpu(s, insn);
/* DISAS_INSN(fbcc) (header elided).  FP conditional branch: reads a
   16- or 32-bit displacement, then classifies QREG_FP_RESULT against
   zero with a quiet compare.  The compare result "flag" encodes
   -1 (less), 0 (equal), 1 (greater) or 2 (unordered); each predicate
   below jumps to l1 when its condition holds. */
2346 offset = ldsw_code(s->pc);
2348 if (insn & (1 << 6)) {
2349 offset = (offset << 16) | lduw_code(s->pc);
2353 l1 = gen_new_label();
2354 /* TODO: Raise BSUN exception. */
2355 flag = gen_new_qreg(QMODE_I32);
2356 zero = gen_new_qreg(QMODE_F64);
2357 gen_op_zerof64(zero);
2358 gen_op_compare_quietf64(flag, QREG_FP_RESULT, zero);
2359 /* Jump to l1 if condition is true. */
2360 switch (insn & 0xf) {
2363 case 1: /* eq (=0) */
2364 gen_op_jmp_z32(flag, l1);
2366 case 2: /* ogt (=1) */
2367 gen_op_sub32(flag, flag, gen_im32(1));
2368 gen_op_jmp_z32(flag, l1);
2370 case 3: /* oge (=0 or =1) */
2371 gen_op_jmp_z32(flag, l1);
2372 gen_op_sub32(flag, flag, gen_im32(1));
2373 gen_op_jmp_z32(flag, l1);
2375 case 4: /* olt (=-1) */
2376 gen_op_jmp_s32(flag, l1);
2378 case 5: /* ole (=-1 or =0) */
2379 gen_op_jmp_s32(flag, l1);
2380 gen_op_jmp_z32(flag, l1);
2382 case 6: /* ogl (=-1 or =1) */
2383 gen_op_jmp_s32(flag, l1);
2384 gen_op_sub32(flag, flag, gen_im32(1));
2385 gen_op_jmp_z32(flag, l1);
2387 case 7: /* or (=2) */
2388 gen_op_sub32(flag, flag, gen_im32(2));
2389 gen_op_jmp_z32(flag, l1);
2391 case 8: /* un (<2) */
2392 gen_op_sub32(flag, flag, gen_im32(2));
2393 gen_op_jmp_s32(flag, l1);
2395 case 9: /* ueq (=0 or =2) */
2396 gen_op_jmp_z32(flag, l1);
2397 gen_op_sub32(flag, flag, gen_im32(2));
2398 gen_op_jmp_z32(flag, l1);
2400 case 10: /* ugt (>0) */
2401 /* ??? Add jmp_gtu. */
2402 gen_op_sub32(flag, flag, gen_im32(1));
2403 gen_op_jmp_ns32(flag, l1);
2405 case 11: /* uge (>=0) */
2406 gen_op_jmp_ns32(flag, l1);
2408 case 12: /* ult (=-1 or =2) */
2409 gen_op_jmp_s32(flag, l1);
2410 gen_op_sub32(flag, flag, gen_im32(2));
2411 gen_op_jmp_z32(flag, l1);
2413 case 13: /* ule (!=1) */
2414 gen_op_sub32(flag, flag, gen_im32(1));
2415 gen_op_jmp_nz32(flag, l1);
2417 case 14: /* ne (!=0) */
2418 gen_op_jmp_nz32(flag, l1);
2421 gen_op_mov32(flag, gen_im32(1));
/* Fall-through path goes to the next insn; l1 path takes the branch. */
2424 gen_jmp_tb(s, 0, s->pc);
2426 gen_jmp_tb(s, 1, addr + offset);
/* frestore: FP state restore is not modelled — aborts translation. */
2429 DISAS_INSN(frestore)
2431 /* TODO: Implement frestore. */
2432 qemu_assert(0, "FRESTORE not implemented");
/* fsave body (header elided): FP state save is not modelled — aborts. */
2437 /* TODO: Implement fsave. */
2438 qemu_assert(0, "FSAVE not implemented");
/* Extract one 16-bit half of a MAC operand into a new qreg, positioned
   according to the current MACSR mode: fractional mode keeps/aligns the
   high half, signed mode sign-extends, unsigned mode zero-extends.
   "upper" selects the high (nonzero) or low half.  Returns the qreg
   (return statement elided in this view). */
2441 static inline int gen_mac_extract_word(DisasContext *s, int val, int upper)
2443 int tmp = gen_new_qreg(QMODE_I32);
2444 if (s->env->macsr & MACSR_FI) {
/* Fractional: operand occupies the top 16 bits. */
2446 gen_op_and32(tmp, val, gen_im32(0xffff0000));
2448 gen_op_shl32(tmp, val, gen_im32(16));
2449 } else if (s->env->macsr & MACSR_SU) {
/* Signed integer: arithmetic-shift / sign-extend the chosen half. */
2451 gen_op_sar32(tmp, val, gen_im32(16));
2453 gen_op_ext16s32(tmp, val);
/* Unsigned integer: logical-shift / zero-extend the chosen half. */
2456 gen_op_shr32(tmp, val, gen_im32(16));
2458 gen_op_ext16u32(tmp, val);
/* Body of DISAS_INSN(mac) (header elided).  EMAC multiply-accumulate:
   optional parallel memory load, 16x16 or 32x32 multiply in the mode
   given by MACSR (fractional/signed/unsigned), accumulate with
   saturation handling, optional dual-accumulate (EMAC_B), then address
   writeback for the load form. */
2474 int saved_flags = -1;
2476 ext = lduw_code(s->pc);
2479 acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
2480 dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
/* Dual-accumulate form requires the EMAC_B feature. */
2481 if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
2482 disas_undef(s, insn);
2486 /* MAC with load. */
2487 tmp = gen_lea(s, insn, OS_LONG);
2488 addr = gen_new_qreg(QMODE_I32);
2489 gen_op_and32(addr, tmp, QREG_MAC_MASK);
2490 /* Load the value now to ensure correct exception behavior.
2491 Perform writeback after reading the MAC inputs. */
2492 loadval = gen_load(s, OS_LONG, addr, 0);
2495 rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
2496 ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
2498 loadval = addr = -1;
2499 rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
2500 ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
2503 gen_op_mac_clear_flags();
2505 if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
2506 /* Skip the multiply if we know we will ignore it. */
2507 l1 = gen_new_label();
2508 tmp = gen_new_qreg(QMODE_I32);
2509 gen_op_and32(tmp, QREG_MACSR, gen_im32(1 << (acc + 8)));
2510 gen_op_jmp_nz32(tmp, l1);
/* Extension word bit 11 clear selects the 16-bit operand form. */
2513 if ((ext & 0x0800) == 0) {
2515 rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
2516 ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
2518 if (s->env->macsr & MACSR_FI) {
2519 gen_op_macmulf(rx, ry);
2521 if (s->env->macsr & MACSR_SU)
2522 gen_op_macmuls(rx, ry);
2524 gen_op_macmulu(rx, ry);
2525 switch ((ext >> 9) & 3) {
2536 /* Save the overflow flag from the multiply. */
2537 saved_flags = gen_new_qreg(QMODE_I32);
2538 gen_op_mov32(saved_flags, QREG_MACSR);
2541 if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
2542 /* Skip the accumulate if the value is already saturated. */
2543 l1 = gen_new_label();
2544 tmp = gen_new_qreg(QMODE_I32);
2545 gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
2546 gen_op_jmp_nz32(tmp, l1);
2554 if (s->env->macsr & MACSR_FI)
2555 gen_op_macsatf(acc);
2556 else if (s->env->macsr & MACSR_SU)
2557 gen_op_macsats(acc);
2559 gen_op_macsatu(acc);
2565 /* Dual accumulate variant. */
2566 acc = (ext >> 2) & 3;
2567 /* Restore the overflow flag from the multiplier. */
2568 gen_op_mov32(QREG_MACSR, saved_flags);
2569 if ((s->env->macsr & MACSR_OMC) != 0) {
2570 /* Skip the accumulate if the value is already saturated. */
2571 l1 = gen_new_label();
2572 tmp = gen_new_qreg(QMODE_I32);
2573 gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
2574 gen_op_jmp_nz32(tmp, l1);
2580 if (s->env->macsr & MACSR_FI)
2581 gen_op_macsatf(acc);
2582 else if (s->env->macsr & MACSR_SU)
2583 gen_op_macsats(acc);
2585 gen_op_macsatu(acc);
2589 gen_op_mac_set_flags(acc);
/* Writeback of the parallel load, after the multiply has consumed its
   inputs (rw may alias rx/ry). */
2593 rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
2594 gen_op_mov32(rw, loadval);
2595 /* FIXME: Should address writeback happen with the masked or
2597 switch ((insn >> 3) & 7) {
2598 case 3: /* Post-increment. */
2599 gen_op_add32(AREG(insn, 0), addr, gen_im32(4));
2601 case 4: /* Pre-decrement. */
2602 gen_op_mov32(AREG(insn, 0), addr);
/* from_mac: read an accumulator into a register, using the extraction
   helper matching the current MACSR mode (fractional / raw / signed /
   unsigned saturating), then clear the accumulator. */
2607 DISAS_INSN(from_mac)
2612 rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
2613 acc = (insn >> 9) & 3;
2614 if (s->env->macsr & MACSR_FI) {
2615 gen_op_get_macf(rx, acc);
2616 } else if ((s->env->macsr & MACSR_OMC) == 0) {
2617 gen_op_get_maci(rx, acc);
2618 } else if (s->env->macsr & MACSR_SU) {
2619 gen_op_get_macs(rx, acc);
2621 gen_op_get_macu(rx, acc);
/* NOTE(review): clear appears unconditional here, but intervening lines
   are elided — confirm against full source. */
2624 gen_op_clear_mac(acc);
/* move_mac: copy one accumulator to another and recompute the
   per-accumulator MACSR flags for the destination. */
2627 DISAS_INSN(move_mac)
2632 dest = (insn >> 9) & 3;
2633 gen_op_move_mac(dest, src);
2634 gen_op_mac_clear_flags();
2635 gen_op_mac_set_flags(dest);
/* from_macsr: read MACSR into an address or data register. */
2638 DISAS_INSN(from_macsr)
2642 reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
2643 gen_op_mov32(reg, QREG_MACSR);
/* from_mask: read the MAC address mask register. */
2646 DISAS_INSN(from_mask)
2649 reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
2650 gen_op_mov32(reg, QREG_MAC_MASK);
/* from_mext: read the accumulator extension word; bit 10 of the opcode
   selects the extension pair (ACC0/1 vs ACC2/3).  Fractional mode uses
   a different packing than integer mode. */
2653 DISAS_INSN(from_mext)
2657 reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
2658 acc = (insn & 0x400) ? 2 : 0;
2659 if (s->env->macsr & MACSR_FI)
2660 gen_op_get_mac_extf(reg, acc);
2662 gen_op_get_mac_exti(reg, acc);
/* macsr_to_ccr: copy the low 4 MACSR flag bits into the CCR (NZVC),
   clearing X; flags become live immediately (CC_OP_FLAGS). */
2665 DISAS_INSN(macsr_to_ccr)
2667 gen_op_mov32(QREG_CC_X, gen_im32(0));
2668 gen_op_and32(QREG_CC_DEST, QREG_MACSR, gen_im32(0xf));
2669 s->cc_op = CC_OP_FLAGS;
/* to_mac body (header elided): write a 32-bit value into an accumulator
   using the MACSR-mode-specific store, then refresh its flags. */
2676 acc = (insn >>9) & 3;
2677 SRC_EA(val, OS_LONG, 0, NULL);
2678 if (s->env->macsr & MACSR_FI) {
2679 gen_op_set_macf(val, acc);
2680 } else if (s->env->macsr & MACSR_SU) {
2681 gen_op_set_macs(val, acc);
2683 gen_op_set_macu(val, acc);
2685 gen_op_mac_clear_flags();
2686 gen_op_mac_set_flags(acc);
/* to_macsr: write MACSR from the effective address via a helper (which
   presumably also updates dependent state — confirm). */
2689 DISAS_INSN(to_macsr)
2692 SRC_EA(val, OS_LONG, 0, NULL);
2693 gen_op_set_macsr(val);
/* to_mask body (header elided): upper 16 bits of the mask are forced
   to 1. */
2700 SRC_EA(val, OS_LONG, 0, NULL);
2701 gen_op_or32(QREG_MAC_MASK, val, gen_im32(0xffff0000));
/* to_mext body (header elided): write an accumulator extension word;
   bit 10 selects the pair, MACSR mode selects the packing. */
2708 SRC_EA(val, OS_LONG, 0, NULL);
2709 acc = (insn & 0x400) ? 2 : 0;
2710 if (s->env->macsr & MACSR_FI)
2711 gen_op_set_mac_extf(val, acc);
2712 else if (s->env->macsr & MACSR_SU)
2713 gen_op_set_mac_exts(val, acc);
2715 gen_op_set_mac_extu(val, acc);
/* One handler per 16-bit opcode value; filled by register_opcode(). */
2718 static disas_proc opcode_table[65536];
/* Register "proc" for every opcode value matching opcode/mask.
   All bits set in "opcode" must also be set in "mask" (sanity-checked).
   Expansion strategy: find the lowest zero bit of the mask, then
   enumerate every value in [opcode with low free bits cleared, ...)
   and install the handler where (i & mask) == opcode. */
2721 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
2727 /* Sanity check. All set bits must be included in the mask. */
2728 if (opcode & ~mask) {
2730 "qemu internal error: bogus opcode definition %04x/%04x\n",
2734 /* This could probably be cleverer. For now just optimize the case where
2735 the top bits are known. */
2736 /* Find the first zero bit in the mask. */
2738 while ((i & mask) != 0)
2740 /* Iterate over all combinations of this and lower bits. */
2745 from = opcode & ~(i - 1);
2747 for (i = from; i < to; i++) {
2748 if ((i & mask) == opcode)
2749 opcode_table[i] = proc;
2753 /* Register m68k opcode handlers. Order is important:
2754 later insns override earlier ones. */
2755 void register_m68k_insns (CPUM68KState *env)
/* INSN registers disas_<name> for opcode/mask, gated on a CPU feature. */
2757 #define INSN(name, opcode, mask, feature) do { \
2758 if (m68k_feature(env, M68K_FEATURE_##feature)) \
2759 register_opcode(disas_##name, 0x##opcode, 0x##mask); \
2761 INSN(undef, 0000, 0000, CF_ISA_A);
2762 INSN(arith_im, 0080, fff8, CF_ISA_A);
2763 INSN(bitrev, 00c0, fff8, CF_ISA_APLUSC);
2764 INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
2765 INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
2766 INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
2767 INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
2768 INSN(arith_im, 0280, fff8, CF_ISA_A);
2769 INSN(byterev, 02c0, fff8, CF_ISA_APLUSC);
2770 INSN(arith_im, 0480, fff8, CF_ISA_A);
2771 INSN(ff1, 04c0, fff8, CF_ISA_APLUSC);
2772 INSN(arith_im, 0680, fff8, CF_ISA_A);
2773 INSN(bitop_im, 0800, ffc0, CF_ISA_A);
2774 INSN(bitop_im, 0840, ffc0, CF_ISA_A);
2775 INSN(bitop_im, 0880, ffc0, CF_ISA_A);
2776 INSN(bitop_im, 08c0, ffc0, CF_ISA_A);
2777 INSN(arith_im, 0a80, fff8, CF_ISA_A);
2778 INSN(arith_im, 0c00, ff38, CF_ISA_A);
2779 INSN(move, 1000, f000, CF_ISA_A);
2780 INSN(move, 2000, f000, CF_ISA_A);
2781 INSN(move, 3000, f000, CF_ISA_A);
2782 INSN(strldsr, 40e7, ffff, CF_ISA_APLUSC);
2783 INSN(negx, 4080, fff8, CF_ISA_A);
2784 INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
2785 INSN(lea, 41c0, f1c0, CF_ISA_A);
2786 INSN(clr, 4200, ff00, CF_ISA_A);
2787 INSN(undef, 42c0, ffc0, CF_ISA_A);
2788 INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
2789 INSN(neg, 4480, fff8, CF_ISA_A);
2790 INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
2791 INSN(not, 4680, fff8, CF_ISA_A);
2792 INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
2793 INSN(pea, 4840, ffc0, CF_ISA_A);
2794 INSN(swap, 4840, fff8, CF_ISA_A);
2795 INSN(movem, 48c0, fbc0, CF_ISA_A);
2796 INSN(ext, 4880, fff8, CF_ISA_A);
2797 INSN(ext, 48c0, fff8, CF_ISA_A);
2798 INSN(ext, 49c0, fff8, CF_ISA_A);
2799 INSN(tst, 4a00, ff00, CF_ISA_A);
2800 INSN(tas, 4ac0, ffc0, CF_ISA_B);
2801 INSN(halt, 4ac8, ffff, CF_ISA_A);
2802 INSN(pulse, 4acc, ffff, CF_ISA_A);
2803 INSN(illegal, 4afc, ffff, CF_ISA_A);
2804 INSN(mull, 4c00, ffc0, CF_ISA_A);
2805 INSN(divl, 4c40, ffc0, CF_ISA_A);
2806 INSN(sats, 4c80, fff8, CF_ISA_B);
2807 INSN(trap, 4e40, fff0, CF_ISA_A);
2808 INSN(link, 4e50, fff8, CF_ISA_A);
2809 INSN(unlk, 4e58, fff8, CF_ISA_A);
2810 INSN(move_to_usp, 4e60, fff8, USP);
2811 INSN(move_from_usp, 4e68, fff8, USP);
2812 INSN(nop, 4e71, ffff, CF_ISA_A);
2813 INSN(stop, 4e72, ffff, CF_ISA_A);
2814 INSN(rte, 4e73, ffff, CF_ISA_A);
2815 INSN(rts, 4e75, ffff, CF_ISA_A);
2816 INSN(movec, 4e7b, ffff, CF_ISA_A);
2817 INSN(jump, 4e80, ffc0, CF_ISA_A);
2818 INSN(jump, 4ec0, ffc0, CF_ISA_A);
2819 INSN(addsubq, 5180, f1c0, CF_ISA_A);
2820 INSN(scc, 50c0, f0f8, CF_ISA_A);
2821 INSN(addsubq, 5080, f1c0, CF_ISA_A);
2822 INSN(tpf, 51f8, fff8, CF_ISA_A);
2824 /* Branch instructions. */
2825 INSN(branch, 6000, f000, CF_ISA_A);
2826 /* Disable long branch instructions, then add back the ones we want. */
2827 INSN(undef, 60ff, f0ff, CF_ISA_A); /* All long branches. */
2828 INSN(branch, 60ff, f0ff, CF_ISA_B);
2829 INSN(undef, 60ff, ffff, CF_ISA_B); /* bra.l */
2830 INSN(branch, 60ff, ffff, BRAL);
2832 INSN(moveq, 7000, f100, CF_ISA_A);
2833 INSN(mvzs, 7100, f100, CF_ISA_B);
2834 INSN(or, 8000, f000, CF_ISA_A);
2835 INSN(divw, 80c0, f0c0, CF_ISA_A);
2836 INSN(addsub, 9000, f000, CF_ISA_A);
2837 INSN(subx, 9180, f1f8, CF_ISA_A);
2838 INSN(suba, 91c0, f1c0, CF_ISA_A);
/* MAC/EMAC line-A opcodes: undef first, then the real handlers. */
2840 INSN(undef_mac, a000, f000, CF_ISA_A);
2841 INSN(mac, a000, f100, CF_EMAC);
2842 INSN(from_mac, a180, f9b0, CF_EMAC);
2843 INSN(move_mac, a110, f9fc, CF_EMAC);
2844 INSN(from_macsr,a980, f9f0, CF_EMAC);
2845 INSN(from_mask, ad80, fff0, CF_EMAC);
2846 INSN(from_mext, ab80, fbf0, CF_EMAC);
2847 INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
2848 INSN(to_mac, a100, f9c0, CF_EMAC);
2849 INSN(to_macsr, a900, ffc0, CF_EMAC);
2850 INSN(to_mext, ab00, fbc0, CF_EMAC);
2851 INSN(to_mask, ad00, ffc0, CF_EMAC);
2853 INSN(mov3q, a140, f1c0, CF_ISA_B);
2854 INSN(cmp, b000, f1c0, CF_ISA_B); /* cmp.b */
2855 INSN(cmp, b040, f1c0, CF_ISA_B); /* cmp.w */
2856 INSN(cmpa, b0c0, f1c0, CF_ISA_B); /* cmpa.w */
2857 INSN(cmp, b080, f1c0, CF_ISA_A);
2858 INSN(cmpa, b1c0, f1c0, CF_ISA_A);
2859 INSN(eor, b180, f1c0, CF_ISA_A);
2860 INSN(and, c000, f000, CF_ISA_A);
2861 INSN(mulw, c0c0, f0c0, CF_ISA_A);
2862 INSN(addsub, d000, f000, CF_ISA_A);
2863 INSN(addx, d180, f1f8, CF_ISA_A);
2864 INSN(adda, d1c0, f1c0, CF_ISA_A);
2865 INSN(shift_im, e080, f0f0, CF_ISA_A);
2866 INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
2867 INSN(undef_fpu, f000, f000, CF_ISA_A);
2868 INSN(fpu, f200, ffc0, CF_FPU);
2869 INSN(fbcc, f280, ffc0, CF_FPU);
2870 INSN(frestore, f340, ffc0, CF_FPU);
2871 INSN(fsave, f340, ffc0, CF_FPU);
2872 INSN(intouch, f340, ffc0, CF_ISA_A);
2873 INSN(cpushl, f428, ff38, CF_ISA_A);
2874 INSN(wddata, fb00, ff00, CF_ISA_A);
2875 INSN(wdebug, fbc0, ffc0, CF_ISA_A);
2879 /* ??? Some of this implementation is not exception safe. We should always
2880 write back the result to memory before setting the condition codes. */
/* Fetch one 16-bit opcode and dispatch to its registered handler. */
2881 static void disas_m68k_insn(CPUState * env, DisasContext *s)
2885 insn = lduw_code(s->pc);
2888 opcode_table[insn](s, insn);
2892 /* Save the result of a floating point operation. */
2893 static void expand_op_fp_result(qOP *qop)
2895 gen_op_movf64(QREG_FP_RESULT, qop->args[0]);
2898 /* Dummy op to indicate that the flags have been set. */
2899 static void expand_op_flags_set(qOP *qop)
2903 /* Convert the condition codes into CC_OP_FLAGS format. */
2904 static void expand_op_flush_flags(qOP *qop)
/* CC_OP_DYNAMIC means the cc_op is only known at runtime (in QREG_CC_OP);
   otherwise it is a translation-time constant. */
2908 if (qop->args[0] == CC_OP_DYNAMIC)
2909 cc_opreg = QREG_CC_OP;
2911 cc_opreg = gen_im32(qop->args[0]);
2912 gen_op_helper32(QREG_NULL, cc_opreg, HELPER_flush_flags);
2915 /* Set CC_DEST after a logical or direct flag setting operation. */
2916 static void expand_op_logic_cc(qOP *qop)
2918 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2921 /* Set CC_SRC and CC_DEST after an arithmetic operation. */
2922 static void expand_op_update_cc_add(qOP *qop)
2924 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2925 gen_op_mov32(QREG_CC_SRC, qop->args[1]);
2928 /* Update the X flag. */
2929 static void expand_op_update_xflag(qOP *qop)
2934 arg0 = qop->args[0];
2935 arg1 = qop->args[1];
/* With no second operand, arg0 already holds the carry value. */
2936 if (arg1 == QREG_NULL) {
2938 gen_op_mov32(QREG_CC_X, arg0);
2940 /* CC_X = arg0 < (unsigned)arg1. */
2941 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2945 /* Set arg0 to the contents of the X flag. */
2946 static void expand_op_get_xflag(qOP *qop)
2948 gen_op_mov32(qop->args[0], QREG_CC_X);
2951 /* Expand a shift by immediate. The ISA only allows shifts by 1-8, so we
2952 already know the shift is within range. */
/* "right" selects shr/sar vs shl; "arith" selects sar vs shr.  The
   pre-shift value is copied so CC_SRC/CC_DEST can record the inputs,
   and CC_X is set from the last bit shifted out. */
2953 static inline void expand_shift_im(qOP *qop, int right, int arith)
2963 val = gen_new_qreg(QMODE_I32);
2964 gen_op_mov32(val, reg);
2965 gen_op_mov32(QREG_CC_DEST, val);
2966 gen_op_mov32(QREG_CC_SRC, tmp);
2969 gen_op_sar32(reg, val, tmp);
2971 gen_op_shr32(reg, val, tmp);
2976 tmp = gen_im32(im - 1);
2978 gen_op_shl32(reg, val, tmp);
2979 tmp = gen_im32(32 - im);
/* Isolate the last bit shifted out into CC_X. */
2981 if (tmp != QREG_NULL)
2982 gen_op_shr32(val, val, tmp);
2983 gen_op_and32(QREG_CC_X, val, gen_im32(1));
/* Thin wrappers selecting direction/arithmetic for the qop expander. */
2986 static void expand_op_shl_im_cc(qOP *qop)
2988 expand_shift_im(qop, 0, 0);
2991 static void expand_op_shr_im_cc(qOP *qop)
2993 expand_shift_im(qop, 1, 0);
2996 static void expand_op_sar_im_cc(qOP *qop)
2998 expand_shift_im(qop, 1, 1);
3001 /* Expand a shift by register. */
3002 /* ??? This gives incorrect answers for shifts by 0 or >= 32 */
/* Same structure as expand_shift_im but with a runtime count: records
   CC_SRC/CC_DEST from the inputs and computes CC_X from the last bit
   shifted out (known-buggy at the boundary counts, per the note above). */
3003 static inline void expand_shift_reg(qOP *qop, int right, int arith)
3011 shift = qop->args[1];
3012 val = gen_new_qreg(QMODE_I32);
3013 gen_op_mov32(val, reg);
3014 gen_op_mov32(QREG_CC_DEST, val);
3015 gen_op_mov32(QREG_CC_SRC, shift);
3016 tmp = gen_new_qreg(QMODE_I32);
3019 gen_op_sar32(reg, val, shift);
3021 gen_op_shr32(reg, val, shift);
3023 gen_op_sub32(tmp, shift, gen_im32(1));
3025 gen_op_shl32(reg, val, shift);
3026 gen_op_sub32(tmp, gen_im32(31), shift);
3028 gen_op_shl32(val, val, tmp);
3029 gen_op_and32(QREG_CC_X, val, gen_im32(1));
/* Thin wrappers selecting direction/arithmetic for the qop expander. */
3032 static void expand_op_shl_cc(qOP *qop)
3034 expand_shift_reg(qop, 0, 0);
3037 static void expand_op_shr_cc(qOP *qop)
3039 expand_shift_reg(qop, 1, 0);
3042 static void expand_op_sar_cc(qOP *qop)
3044 expand_shift_reg(qop, 1, 1);
3047 /* Set the Z flag to (arg0 & arg1) == 0. */
/* Clears Z in CC_DEST, then re-sets it only on the branch-not-taken
   path (i.e. when the masked value is zero). */
3048 static void expand_op_btest(qOP *qop)
3053 l1 = gen_new_label();
3054 tmp = gen_new_qreg(QMODE_I32);
3055 gen_op_and32(tmp, qop->args[0], qop->args[1]);
3056 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(~(uint32_t)CCF_Z));
3057 gen_op_jmp_nz32(tmp, l1);
3058 gen_op_or32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(CCF_Z));
3062 /* arg0 += arg1 + CC_X */
/* Two paths: X set -> add the extra 1, carry-out computed with <=
   (since result == arg1 can only happen via the +1 wrap), cc_op ADDX;
   X clear -> plain add, carry-out via <, cc_op ADD. */
3063 static void expand_op_addx_cc(qOP *qop)
3065 int arg0 = qop->args[0];
3066 int arg1 = qop->args[1];
3069 gen_op_add32 (arg0, arg0, arg1);
3070 l1 = gen_new_label();
3071 l2 = gen_new_label();
3072 gen_op_jmp_z32(QREG_CC_X, l1);
3073 gen_op_add32(arg0, arg0, gen_im32(1));
3074 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADDX));
3075 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
3078 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADD));
3079 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
3083 /* arg0 -= arg1 + CC_X */
/* Mirror of expand_op_addx_cc: the borrow (new X) is computed from the
   operands BEFORE the subtraction, with <= on the X-set path and < on
   the X-clear path, and cc_op set to SUBX/SUB accordingly. */
3084 static void expand_op_subx_cc(qOP *qop)
3086 int arg0 = qop->args[0];
3087 int arg1 = qop->args[1];
3090 l1 = gen_new_label();
3091 l2 = gen_new_label();
3092 gen_op_jmp_z32(QREG_CC_X, l1);
3093 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
3094 gen_op_sub32(arg0, arg0, gen_im32(1));
3095 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUBX));
3098 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
3099 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUB));
3101 gen_op_sub32 (arg0, arg0, arg1);
3104 /* Expand target specific ops to generic qops. */
/* Walks the recorded qop list, relinking generic ops (index below
   FIRST_TARGET_OP) unchanged and replacing each target-specific op with
   its expand_op_* expansion generated via qop-target.def. */
3105 static void expand_target_qops(void)
3111 /* Copy the list of qops, expanding target specific ops as we go. */
3112 qop = gen_first_qop;
3113 gen_first_qop = NULL;
3114 gen_last_qop = NULL;
3115 for (; qop; qop = next) {
3118 if (c < FIRST_TARGET_OP) {
/* Generic op: splice it back onto the rebuilt list as-is. */
3119 qop->prev = gen_last_qop;
3122 gen_last_qop->next = qop;
3124 gen_first_qop = qop;
3129 #define DEF(name, nargs, barrier) \
3130 case INDEX_op_##name: \
3131 expand_op_##name(qop); \
3133 #include "qop-target.def"
3136 cpu_abort(NULL, "Unexpanded target qop");
3141 /* ??? Implement this. */
/* Placeholder flag-optimization pass (currently empty). */
3143 optimize_flags(void)
3148 /* generate intermediate code for basic block 'tb'. */
/* Core translation loop: initialize a DisasContext from the CPU state,
   then disassemble instructions until a jump, the op buffer fills, the
   page boundary nears, or single-step forces termination.  The
   "search_pc" mode (second caller) also records per-op pc metadata. */
3150 gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
3153 DisasContext dc1, *dc = &dc1;
3154 uint16_t *gen_opc_end;
3156 target_ulong pc_start;
3160 /* generate intermediate code */
3165 gen_opc_ptr = gen_opc_buf;
3166 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3167 gen_opparam_ptr = gen_opparam_buf;
3170 dc->is_jmp = DISAS_NEXT;
3172 dc->cc_op = CC_OP_DYNAMIC;
3173 dc->singlestep_enabled = env->singlestep_enabled;
3174 dc->fpcr = env->fpcr;
3175 dc->user = (env->sr & SR_S) == 0;
3181 pc_offset = dc->pc - pc_start;
3182 gen_throws_exception = NULL;
/* Breakpoint at this pc: emit a debug exception and stop the TB. */
3183 if (env->nb_breakpoints > 0) {
3184 for(j = 0; j < env->nb_breakpoints; j++) {
3185 if (env->breakpoints[j] == dc->pc) {
3186 gen_exception(dc, dc->pc, EXCP_DEBUG);
3187 dc->is_jmp = DISAS_JUMP;
3195 j = gen_opc_ptr - gen_opc_buf;
3199 gen_opc_instr_start[lj++] = 0;
3201 gen_opc_pc[lj] = dc->pc;
3202 gen_opc_instr_start[lj] = 1;
3204 last_cc_op = dc->cc_op;
3205 dc->insn_pc = dc->pc;
3206 disas_m68k_insn(env, dc);
3208 /* Terminate the TB on memory ops if watchpoints are present. */
3209 /* FIXME: This should be replaced by the deterministic execution
3210 * IRQ raising bits. */
3211 if (dc->is_mem && env->nb_watchpoints)
3213 } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
3214 !env->singlestep_enabled &&
3215 (pc_offset) < (TARGET_PAGE_SIZE - 32));
3217 if (__builtin_expect(env->singlestep_enabled, 0)) {
3218 /* Make sure the pc is updated, and raise a debug exception. */
3220 gen_flush_cc_op(dc);
3221 gen_op_mov32(QREG_PC, gen_im32((long)dc->pc));
3223 gen_op_raise_exception(EXCP_DEBUG);
3225 switch(dc->is_jmp) {
3227 gen_flush_cc_op(dc);
3228 gen_jmp_tb(dc, 0, dc->pc);
3233 gen_flush_cc_op(dc);
3234 /* indicate that the hash table must be used to find the next TB */
3235 gen_op_mov32(QREG_T0, gen_im32(0));
3239 /* nothing more to generate */
3243 *gen_opc_ptr = INDEX_op_end;
3246 if (loglevel & CPU_LOG_TB_IN_ASM) {
3247 fprintf(logfile, "----------------\n");
3248 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3249 target_disas(logfile, pc_start, dc->pc - pc_start, 0);
3250 fprintf(logfile, "\n");
3251 if (loglevel & (CPU_LOG_TB_OP)) {
3252 fprintf(logfile, "OP:\n");
3253 dump_ops(gen_opc_buf, gen_opparam_buf);
3254 fprintf(logfile, "\n");
3259 j = gen_opc_ptr - gen_opc_buf;
3262 gen_opc_instr_start[lj++] = 0;
3264 tb->size = dc->pc - pc_start;
3268 //expand_target_qops();
/* Public entry points: translate a TB, without (0) or with (1)
   pc-search metadata recording. */
3272 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
3274 return gen_intermediate_code_internal(env, tb, 0);
3277 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
3279 return gen_intermediate_code_internal(env, tb, 1);
/* Reset CPU state: zero everything up to (not including) the
   breakpoint bookkeeping fields, then re-establish the stack pointer
   selection and a valid flags mode. */
3282 void cpu_reset(CPUM68KState *env)
3284 memset(env, 0, offsetof(CPUM68KState, breakpoints));
3285 #if !defined (CONFIG_USER_ONLY)
3288 m68k_switch_sp(env);
3289 /* ??? FP regs should be initialized to NaN. */
3290 env->cc_op = CC_OP_FLAGS;
3291 /* TODO: We should set PC from the interrupt vector. */
/* Allocate a new CPU state (remaining init lines elided in this view).
   NOTE(review): malloc result appears unchecked here — confirm against
   full source. */
3296 CPUM68KState *cpu_m68k_init(void)
3300 env = malloc(sizeof(CPUM68KState));
/* Release a CPU state previously created by cpu_m68k_init(). */
3309 void cpu_m68k_close(CPUM68KState *env)
3314 void cpu_dump_state(CPUState *env, FILE *f,
3315 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
3321 for (i = 0; i < 8; i++)
3323 u.d = env->fregs[i];
3324 cpu_fprintf (f, "D%d = %08x A%d = %08x F%d = %08x%08x (%12g)\n",
3325 i, env->dregs[i], i, env->aregs[i],
3326 i, u.l.upper, u.l.lower, u.d);
3328 cpu_fprintf (f, "PC = %08x ", env->pc);
3330 cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
3331 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
3332 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
3333 cpu_fprintf (f, "FPRESULT = %12g\n", env->fp_result);