dead_tmp(var);
}
+/* Variant of store_reg which uses branch&exchange logic when storing
+ to r15 in ARM architecture v7 and above. The source must be a temporary
+ and will be marked as dead (store_reg/gen_bx consume it).
+ Wrapped in do/while(0) so the macro expands to a single statement and
+ is safe inside an unbraced if/else at the call site; (reg) is
+ parenthesized against operator-precedence surprises in the argument. */
+#define store_reg_bx(dc, reg, var) \
+ do { \
+ if (ENABLE_ARCH_7 && (reg) == 15) \
+ gen_bx(dc, var); \
+ else \
+ store_reg(dc, reg, var); \
+ } while (0)
/* Basic operations. */
#define gen_op_movl_T0_T1() tcg_gen_mov_i32(cpu_T[0], cpu_T[1])
dead_tmp(tmp);
}
+/* dest = T0 + T1 + CF. */
+static void gen_add_carry(TCGv dest, TCGv t0, TCGv t1)
+{
+ TCGv cf;
+
+ /* Fetch the carry flag into a fresh temporary first; dest may
+ alias t0 or t1, so the flag must not live in either of them. */
+ cf = load_cpu_field(CF);
+ tcg_gen_add_i32(dest, t0, t1);
+ tcg_gen_add_i32(dest, dest, cf);
+ dead_tmp(cf);
+}
+
/* dest = T0 - T1 + CF - 1. */
static void gen_sub_carry(TCGv dest, TCGv t0, TCGv t1)
{
return 0;
}
-/* Generate an old-style exception return. */
-static void gen_exception_return(DisasContext *s)
+/* Generate an old-style exception return. Marks pc as dead. */
+static void gen_exception_return(DisasContext *s, TCGv pc)
{
TCGv tmp;
- gen_movl_reg_T0(s, 15);
+ store_reg(s, 15, pc);
tmp = load_cpu_field(spsr);
gen_set_cpsr(tmp, 0xffffffff);
dead_tmp(tmp);
shift = ((insn >> 8) & 0xf) * 2;
if (shift)
val = (val >> shift) | (val << (32 - shift));
- gen_op_movl_T1_im(val);
+ tmp2 = new_tmp();
+ tcg_gen_movi_i32(tmp2, val);
if (logic_cc && shift)
- gen_set_CF_bit31(cpu_T[1]);
+ gen_set_CF_bit31(tmp2);
} else {
/* register */
rm = (insn) & 0xf;
- gen_movl_T1_reg(s, rm);
+ tmp2 = load_reg(s, rm);
shiftop = (insn >> 5) & 3;
if (!(insn & (1 << 4))) {
shift = (insn >> 7) & 0x1f;
- gen_arm_shift_im(cpu_T[1], shiftop, shift, logic_cc);
+ gen_arm_shift_im(tmp2, shiftop, shift, logic_cc);
} else {
rs = (insn >> 8) & 0xf;
tmp = load_reg(s, rs);
- gen_arm_shift_reg(cpu_T[1], shiftop, tmp, logic_cc);
+ gen_arm_shift_reg(tmp2, shiftop, tmp, logic_cc);
}
}
if (op1 != 0x0f && op1 != 0x0d) {
rn = (insn >> 16) & 0xf;
- gen_movl_T0_reg(s, rn);
- }
+ /* First operand goes into a TCG temporary now; the old
+ cpu_T[0] load is fully replaced, not kept alongside. */
+ tmp = load_reg(s, rn);
+ } else
+ /* MOV (0x0d) and MVN (0x0f) take no Rn; allocate a scratch
+ temp so the common store/dead paths below stay uniform. */
+ tmp = new_tmp();
rd = (insn >> 12) & 0xf;
switch(op1) {
case 0x00:
- gen_op_andl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_and_i32(tmp, tmp, tmp2);
if (logic_cc)
- gen_op_logic_T0_cc();
+ gen_logic_CC(tmp);
+ store_reg_bx(s, rd, tmp);
break;
case 0x01:
- gen_op_xorl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_xor_i32(tmp, tmp, tmp2);
if (logic_cc)
- gen_op_logic_T0_cc();
+ gen_logic_CC(tmp);
+ store_reg_bx(s, rd, tmp);
break;
case 0x02:
if (set_cc && rd == 15) {
/* SUBS r15, ... is used for exception return. */
if (IS_USER(s))
goto illegal_op;
- gen_op_subl_T0_T1_cc();
- gen_exception_return(s);
+ gen_helper_sub_cc(tmp, tmp, tmp2);
+ gen_exception_return(s, tmp);
} else {
if (set_cc)
- gen_op_subl_T0_T1_cc();
+ gen_helper_sub_cc(tmp, tmp, tmp2);
else
- gen_op_subl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_sub_i32(tmp, tmp, tmp2);
+ store_reg_bx(s, rd, tmp);
}
break;
case 0x03:
if (set_cc)
- gen_op_rsbl_T0_T1_cc();
+ gen_helper_sub_cc(tmp, tmp2, tmp);
else
- gen_op_rsbl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_sub_i32(tmp, tmp2, tmp);
+ store_reg_bx(s, rd, tmp);
break;
case 0x04:
if (set_cc)
- gen_op_addl_T0_T1_cc();
+ gen_helper_add_cc(tmp, tmp, tmp2);
else
- gen_op_addl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_add_i32(tmp, tmp, tmp2);
+ store_reg_bx(s, rd, tmp);
break;
case 0x05:
if (set_cc)
- gen_op_adcl_T0_T1_cc();
+ gen_helper_adc_cc(tmp, tmp, tmp2);
else
- gen_adc_T0_T1();
- gen_movl_reg_T0(s, rd);
+ gen_add_carry(tmp, tmp, tmp2);
+ store_reg_bx(s, rd, tmp);
break;
case 0x06:
if (set_cc)
- gen_op_sbcl_T0_T1_cc();
+ gen_helper_sbc_cc(tmp, tmp, tmp2);
else
- gen_sbc_T0_T1();
- gen_movl_reg_T0(s, rd);
+ gen_sub_carry(tmp, tmp, tmp2);
+ store_reg_bx(s, rd, tmp);
break;
case 0x07:
if (set_cc)
- gen_op_rscl_T0_T1_cc();
+ gen_helper_sbc_cc(tmp, tmp2, tmp);
else
- gen_rsc_T0_T1();
- gen_movl_reg_T0(s, rd);
+ gen_sub_carry(tmp, tmp2, tmp);
+ store_reg_bx(s, rd, tmp);
break;
case 0x08:
if (set_cc) {
- gen_op_andl_T0_T1();
- gen_op_logic_T0_cc();
+ tcg_gen_and_i32(tmp, tmp, tmp2);
+ gen_logic_CC(tmp);
}
+ dead_tmp(tmp);
break;
case 0x09:
if (set_cc) {
- gen_op_xorl_T0_T1();
- gen_op_logic_T0_cc();
+ tcg_gen_xor_i32(tmp, tmp, tmp2);
+ gen_logic_CC(tmp);
}
+ dead_tmp(tmp);
break;
case 0x0a:
- if (set_cc) {
- gen_op_subl_T0_T1_cc();
- }
+ if (set_cc)
+ gen_helper_sub_cc(tmp, tmp, tmp2);
+ dead_tmp(tmp);
break;
case 0x0b:
- if (set_cc) {
- gen_op_addl_T0_T1_cc();
- }
+ if (set_cc)
+ gen_helper_add_cc(tmp, tmp, tmp2);
+ dead_tmp(tmp);
break;
case 0x0c:
- gen_op_orl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_or_i32(tmp, tmp, tmp2);
if (logic_cc)
- gen_op_logic_T0_cc();
+ gen_logic_CC(tmp);
+ store_reg_bx(s, rd, tmp);
break;
case 0x0d:
- if (rd == 15) {
- if (logic_cc) {
- /* MOVS r15, ... is used for exception return. */
- if (IS_USER(s))
- goto illegal_op;
- gen_op_movl_T0_T1();
- gen_exception_return(s);
- break;
- } else if (ENABLE_ARCH_7) {
- tmp = new_tmp();
- tcg_gen_mov_i32(tmp, cpu_T[1]);
- gen_bx(s, tmp);
- break;
- }
+ tcg_gen_mov_i32(tmp, tmp2);
+ if (logic_cc && rd == 15) {
+ /* MOVS r15, ... is used for exception return. */
+ if (IS_USER(s))
+ goto illegal_op;
+ gen_exception_return(s, tmp);
+ } else {
+ store_reg_bx(s, rd, tmp);
+ if (logic_cc)
+ gen_logic_CC(tmp2);
}
- gen_movl_reg_T1(s, rd);
- if (logic_cc)
- gen_op_logic_T1_cc();
break;
case 0x0e:
- gen_op_bicl_T0_T1();
- gen_movl_reg_T0(s, rd);
+ tcg_gen_bic_i32(tmp, tmp, tmp2);
if (logic_cc)
- gen_op_logic_T0_cc();
+ gen_logic_CC(tmp);
+ store_reg_bx(s, rd, tmp);
break;
default:
case 0x0f:
- gen_op_notl_T1();
- gen_movl_reg_T1(s, rd);
+ tcg_gen_not_i32(tmp, tmp2);
if (logic_cc)
- gen_op_logic_T1_cc();
+ gen_logic_CC(tmp);
+ store_reg_bx(s, rd, tmp);
break;
}
+ dead_tmp(tmp2);
} else {
/* other instructions */
op1 = (insn >> 24) & 0xf;