target-alpha: convert palcode ops to TCG
[qemu] / target-alpha / translate.c
1 /*
2  *  Alpha emulation cpu translation for qemu.
3  *
4  *  Copyright (c) 2007 Jocelyn Mayer
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, write to the Free Software
18  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19  */
20
21 #include <stdint.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "host-utils.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define DO_SINGLE_STEP
34 #define ALPHA_DEBUG_DISAS
35 #define DO_TB_FLUSH
36
37 typedef struct DisasContext DisasContext;
38 struct DisasContext {
39     uint64_t pc;
40     int mem_idx;
41 #if !defined (CONFIG_USER_ONLY)
42     int pal_mode;
43 #endif
44     uint32_t amask;
45 };
46
47 /* global register indexes */
48 static TCGv cpu_env;
49 static TCGv cpu_ir[31];
50 static TCGv cpu_fir[31];
51 static TCGv cpu_pc;
52 static TCGv cpu_lock;
53
54 /* register names */
55 static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
56
57 #include "gen-icount.h"
58
59 static void alpha_translate_init(void)
60 {
61     int i;
62     char *p;
63     static int done_init = 0;
64
65     if (done_init)
66         return;
67
68     cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
69
70     p = cpu_reg_names;
71     for (i = 0; i < 31; i++) {
72         sprintf(p, "ir%d", i);
73         cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74                                        offsetof(CPUState, ir[i]), p);
75         p += (i < 10) ? 4 : 5;
76
77         sprintf(p, "fir%d", i);
78         cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
79                                         offsetof(CPUState, fir[i]), p);
80         p += (i < 10) ? 5 : 6;
81     }
82
83     cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
84                                 offsetof(CPUState, pc), "pc");
85
86     cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
87                                   offsetof(CPUState, lock), "lock");
88
89     /* register helpers */
90 #undef DEF_HELPER
91 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
92 #include "helper.h"
93
94     done_init = 1;
95 }
96
97 static always_inline void gen_excp (DisasContext *ctx,
98                                     int exception, int error_code)
99 {
100     TCGv tmp1, tmp2;
101
102     tcg_gen_movi_i64(cpu_pc, ctx->pc);
103     tmp1 = tcg_const_i32(exception);
104     tmp2 = tcg_const_i32(error_code);
105     tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
106     tcg_temp_free(tmp2);
107     tcg_temp_free(tmp1);
108 }
109
110 static always_inline void gen_invalid (DisasContext *ctx)
111 {
112     gen_excp(ctx, EXCP_OPCDEC, 0);
113 }
114
115 static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
116 {
117     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
118     tcg_gen_qemu_ld32u(tmp, t1, flags);
119     tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
120     tcg_temp_free(tmp);
121 }
122
123 static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
124 {
125     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
126     tcg_gen_qemu_ld64(tmp, t1, flags);
127     tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
128     tcg_temp_free(tmp);
129 }
130
131 static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
132 {
133     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
134     tcg_gen_qemu_ld32u(tmp, t1, flags);
135     tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
136     tcg_temp_free(tmp);
137 }
138
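    /* Load-locked (LDL_L/LDQ_L): remember the locked address in cpu_lock so
       that a later store-conditional can check it.  */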
139 static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
140 {
141     tcg_gen_mov_i64(cpu_lock, t1);
142     tcg_gen_qemu_ld32s(t0, t1, flags);
143 }
144
145 static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
146 {
147     tcg_gen_mov_i64(cpu_lock, t1);
148     tcg_gen_qemu_ld64(t0, t1, flags);
149 }
150
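    /* Generic load helper: does nothing when ra is R31, treats R31 as a zero
       base register, and clears the low three address bits when 'clear' is
       set (LDQ_U-style access).  */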
151 static always_inline void gen_load_mem (DisasContext *ctx,
152                                         void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
153                                         int ra, int rb, int32_t disp16,
154                                         int fp, int clear)
155 {
156     TCGv addr;
157
158     if (unlikely(ra == 31))
159         return;
160
161     addr = tcg_temp_new(TCG_TYPE_I64);
162     if (rb != 31) {
163         tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
164         if (clear)
165             tcg_gen_andi_i64(addr, addr, ~0x7);
166     } else {
167         if (clear)
168             disp16 &= ~0x7;
169         tcg_gen_movi_i64(addr, disp16);
170     }
171     if (fp)
172         tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
173     else
174         tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
175     tcg_temp_free(addr);
176 }
177
178 static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
179 {
180     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
181     tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
182     tcg_gen_qemu_st32(tmp, t1, flags);
183     tcg_temp_free(tmp);
184 }
185
186 static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
187 {
188     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
189     tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
190     tcg_gen_qemu_st64(tmp, t1, flags);
191     tcg_temp_free(tmp);
192 }
193
194 static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
195 {
196     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
197     tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
198     tcg_gen_qemu_st32(tmp, t1, flags);
199     tcg_temp_free(tmp);
200 }
201
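    /* Store-conditional (STL_C/STQ_C): the store is performed only if cpu_lock
       still holds the target address; t0 is then set to 1 on success and 0 on
       failure, and the lock is invalidated in either case.  */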
202 static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
203 {
204     int l1, l2;
205
206     l1 = gen_new_label();
207     l2 = gen_new_label();
208     tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
209     tcg_gen_qemu_st32(t0, t1, flags);
210     tcg_gen_movi_i64(t0, 1);    /* success: lock still held */
211     tcg_gen_br(l2);
212     gen_set_label(l1);
213     tcg_gen_movi_i64(t0, 0);    /* failure: lock was lost */
214     gen_set_label(l2);
215     tcg_gen_movi_i64(cpu_lock, -1);
216 }
217
218 static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
219 {
220     int l1, l2;
221
222     l1 = gen_new_label();
223     l2 = gen_new_label();
224     tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
225     tcg_gen_qemu_st64(t0, t1, flags);
226     tcg_gen_movi_i64(t0, 1);    /* success: lock still held */
227     tcg_gen_br(l2);
228     gen_set_label(l1);
229     tcg_gen_movi_i64(t0, 0);    /* failure: lock was lost */
230     gen_set_label(l2);
231     tcg_gen_movi_i64(cpu_lock, -1);
232 }
233
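    /* Generic store helper: mirrors gen_load_mem, but stores zero when ra is
       R31 and clears the low three address bits when 'clear' is set (STQ_U).  */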
234 static always_inline void gen_store_mem (DisasContext *ctx,
235                                          void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
236                                          int ra, int rb, int32_t disp16,
237                                          int fp, int clear)
238 {
239     TCGv addr = tcg_temp_new(TCG_TYPE_I64);
240     if (rb != 31) {
241         tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
242         if (clear)
243             tcg_gen_andi_i64(addr, addr, ~0x7);
244     } else {
245         if (clear)
246             disp16 &= ~0x7;
247         tcg_gen_movi_i64(addr, disp16);
248     }
249     if (ra != 31) {
250         if (fp)
251             tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
252         else
253             tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
254     } else {
255         TCGv zero = tcg_const_i64(0);
256         tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
257         tcg_temp_free(zero);
258     }
259     tcg_temp_free(addr);
260 }
261
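    /* Integer conditional branch: test ra (or just its low bit when 'mask' is
       set) against zero and load cpu_pc with either the fall-through or the
       branch target address.  */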
262 static always_inline void gen_bcond (DisasContext *ctx,
263                                      TCGCond cond,
264                                      int ra, int32_t disp16, int mask)
265 {
266     int l1, l2;
267
268     l1 = gen_new_label();
269     l2 = gen_new_label();
270     if (likely(ra != 31)) {
271         if (mask) {
272             TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
273             tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
274             tcg_gen_brcondi_i64(cond, tmp, 0, l1);
275             tcg_temp_free(tmp);
276         } else
277             tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
278     } else {
279         /* Very uncommon case - Do not bother to optimize.  */
280         TCGv tmp = tcg_const_i64(0);
281         tcg_gen_brcondi_i64(cond, tmp, 0, l1);
282         tcg_temp_free(tmp);
283     }
284     tcg_gen_movi_i64(cpu_pc, ctx->pc);
285     tcg_gen_br(l2);
286     gen_set_label(l1);
287     tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
288     gen_set_label(l2);
289 }
290
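    /* Floating-point conditional branch: the helper evaluates the condition on
       fa (F31 reads as zero) and the branch is taken when the result is
       non-zero.  */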
291 static always_inline void gen_fbcond (DisasContext *ctx,
292                                       void* func,
293                                       int ra, int32_t disp16)
294 {
295     int l1, l2;
296     TCGv tmp;
297
298     l1 = gen_new_label();
299     l2 = gen_new_label();
300     if (ra != 31) {
301         tmp = tcg_temp_new(TCG_TYPE_I64);
302         tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
303     } else  {
304         tmp = tcg_const_i64(0);
305         tcg_gen_helper_1_1(func, tmp, tmp);
306     }
307     tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
308     tcg_gen_movi_i64(cpu_pc, ctx->pc);
309     tcg_gen_br(l2);
310     gen_set_label(l1);
311     tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
312     gen_set_label(l2);
313 }
314
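    /* CMOVxx: copy rb (or the literal) into rc unless the inverted condition
       on ra holds; 'mask' selects the low-bit forms CMOVLBS/CMOVLBC.  */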
315 static always_inline void gen_cmov (TCGCond inv_cond,
316                                     int ra, int rb, int rc,
317                                     int islit, uint8_t lit, int mask)
318 {
319     int l1;
320
321     if (unlikely(rc == 31))
322         return;
323
324     l1 = gen_new_label();
325
326     if (ra != 31) {
327         if (mask) {
328             TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
329             tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
330             tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
331             tcg_temp_free(tmp);
332         } else
333             tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
334     } else {
335         /* Very uncommon case - Do not bother to optimize.  */
336         TCGv tmp = tcg_const_i64(0);
337         tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
338         tcg_temp_free(tmp);
339     }
340
341     if (islit)
342         tcg_gen_movi_i64(cpu_ir[rc], lit);
343     else
344         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
345     gen_set_label(l1);
346 }
347
348 static always_inline void gen_farith2 (void *helper,
349                                        int rb, int rc)
350 {
351     if (unlikely(rc == 31))
352         return;
353
354     if (rb != 31)
355         tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
356     else {
357         TCGv tmp = tcg_const_i64(0);
358         tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
359         tcg_temp_free(tmp);
360     }
361 }
362
363 static always_inline void gen_farith3 (void *helper,
364                                        int ra, int rb, int rc)
365 {
366     if (unlikely(rc == 31))
367         return;
368
369     if (ra != 31) {
370         if (rb != 31)
371             tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
372         else {
373             TCGv tmp = tcg_const_i64(0);
374             tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
375             tcg_temp_free(tmp);
376         }
377     } else {
378         TCGv tmp = tcg_const_i64(0);
379         if (rb != 31)
380             tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
381         else
382             tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
383         tcg_temp_free(tmp);
384     }
385 }
386
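    /* FCMOVxx: the helper tests fa; when the result is non-zero, fb is copied
       into fc.  */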
387 static always_inline void gen_fcmov (void *func,
388                                      int ra, int rb, int rc)
389 {
390     int l1;
391     TCGv tmp;
392
393     if (unlikely(rc == 31))
394         return;
395
396     l1 = gen_new_label();
398     if (ra != 31) {
399         tmp = tcg_temp_new(TCG_TYPE_I64);
400         tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
401     } else  {
402         tmp = tcg_const_i64(0);
403         tcg_gen_helper_1_1(func, tmp, tmp);
404     }
405     tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
406     if (rb != 31)
407         tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);
408     else
409         tcg_gen_movi_i64(cpu_fir[rc], 0);
410     gen_set_label(l1);
411 }
412
413 /* EXTWH, EXTLH, EXTQH */
414 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
415                                     int ra, int rb, int rc,
416                                     int islit, uint8_t lit)
417 {
418     if (unlikely(rc == 31))
419         return;
420
421     if (ra != 31) {
422         if (islit) {
423             if (lit != 0)
424                 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
425             else
426                 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
427         } else {
428             TCGv tmp1, tmp2;
429             tmp1 = tcg_temp_new(TCG_TYPE_I64);
430             tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
431             tcg_gen_shli_i64(tmp1, tmp1, 3);
432             tmp2 = tcg_const_i64(64);
433             tcg_gen_sub_i64(tmp1, tmp2, tmp1);
434             tcg_temp_free(tmp2);
435             tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
436             tcg_temp_free(tmp1);
437         }
438         if (tcg_gen_ext_i64)
439             tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
440     } else
441         tcg_gen_movi_i64(cpu_ir[rc], 0);
442 }
443
444 /* EXTBL, EXTWL, EXTLL, EXTQL */
445 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
446                                     int ra, int rb, int rc,
447                                     int islit, uint8_t lit)
448 {
449     if (unlikely(rc == 31))
450         return;
451
452     if (ra != 31) {
453         if (islit) {
454                 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
455         } else {
456             TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
457             tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
458             tcg_gen_shli_i64(tmp, tmp, 3);
459             tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
460             tcg_temp_free(tmp);
461         }
462         if (tcg_gen_ext_i64)
463             tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
464     } else
465         tcg_gen_movi_i64(cpu_ir[rc], 0);
466 }
467
468 /* Code to call arith3 helpers */
469 static always_inline void gen_arith3 (void *helper,
470                                       int ra, int rb, int rc,
471                                       int islit, uint8_t lit)
472 {
473     if (unlikely(rc == 31))
474         return;
475
476     if (ra != 31) {
477         if (islit) {
478             TCGv tmp = tcg_const_i64(lit);
479             tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
480             tcg_temp_free(tmp);
481         } else
482             tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
483     } else {
484         TCGv tmp1 = tcg_const_i64(0);
485         if (islit) {
486             TCGv tmp2 = tcg_const_i64(lit);
487             tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
488             tcg_temp_free(tmp2);
489         } else
490             tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
491         tcg_temp_free(tmp1);
492     }
493 }
494
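    /* Integer compare: set rc to 1 when the condition holds between ra and rb
       (or the literal), 0 otherwise.  */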
495 static always_inline void gen_cmp(TCGCond cond,
496                                   int ra, int rb, int rc,
497                                   int islit, uint8_t lit)
498 {
499     int l1, l2;
500     TCGv tmp;
501
502     if (unlikely(rc == 31))
503         return;
504
505     l1 = gen_new_label();
506     l2 = gen_new_label();
507
508     if (ra != 31) {
509         tmp = tcg_temp_new(TCG_TYPE_I64);
510         tcg_gen_mov_i64(tmp, cpu_ir[ra]);
511     } else
512         tmp = tcg_const_i64(0);
513     if (islit)
514         tcg_gen_brcondi_i64(cond, tmp, lit, l1);
515     else
516         tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
517
518     tcg_gen_movi_i64(cpu_ir[rc], 0);
519     tcg_gen_br(l2);
520     gen_set_label(l1);
521     tcg_gen_movi_i64(cpu_ir[rc], 1);
522     gen_set_label(l2);
523 }
524
525 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
526 {
527     uint32_t palcode;
528     int32_t disp21, disp16, disp12;
529     uint16_t fn11, fn16;
530     uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
531     uint8_t lit;
532     int ret;
533
534     /* Decode all instruction fields */
535     opc = insn >> 26;
536     ra = (insn >> 21) & 0x1F;
537     rb = (insn >> 16) & 0x1F;
538     rc = insn & 0x1F;
539     sbz = (insn >> 13) & 0x07;
540     islit = (insn >> 12) & 1;
541     if (rb == 31 && !islit) {
542         islit = 1;
543         lit = 0;
544     } else
545         lit = (insn >> 13) & 0xFF;
546     palcode = insn & 0x03FFFFFF;
547     disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
548     disp16 = (int16_t)(insn & 0x0000FFFF);
549     disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
550     fn16 = insn & 0x0000FFFF;
551     fn11 = (insn >> 5) & 0x000007FF;
552     fpfn = fn11 & 0x3F;
553     fn7 = (insn >> 5) & 0x0000007F;
554     fn2 = (insn >> 5) & 0x00000003;
555     ret = 0;
556 #if defined ALPHA_DEBUG_DISAS
557     if (logfile != NULL) {
558         fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
559                 opc, ra, rb, rc, disp16);
560     }
561 #endif
562     switch (opc) {
563     case 0x00:
564         /* CALL_PAL */
565         if (palcode >= 0x80 && palcode < 0xC0) {
566             /* Unprivileged PAL call */
567             gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
568 #if !defined (CONFIG_USER_ONLY)
569         } else if (palcode < 0x40) {
570             /* Privileged PAL code */
571             if (ctx->mem_idx & 1)
572                 goto invalid_opc;
573             else
574                 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
575 #endif
576         } else {
577             /* Invalid PAL call */
578             goto invalid_opc;
579         }
580         ret = 3;
581         break;
582     case 0x01:
583         /* OPC01 */
584         goto invalid_opc;
585     case 0x02:
586         /* OPC02 */
587         goto invalid_opc;
588     case 0x03:
589         /* OPC03 */
590         goto invalid_opc;
591     case 0x04:
592         /* OPC04 */
593         goto invalid_opc;
594     case 0x05:
595         /* OPC05 */
596         goto invalid_opc;
597     case 0x06:
598         /* OPC06 */
599         goto invalid_opc;
600     case 0x07:
601         /* OPC07 */
602         goto invalid_opc;
603     case 0x08:
604         /* LDA */
605         if (likely(ra != 31)) {
606             if (rb != 31)
607                 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
608             else
609                 tcg_gen_movi_i64(cpu_ir[ra], disp16);
610         }
611         break;
612     case 0x09:
613         /* LDAH */
614         if (likely(ra != 31)) {
615             if (rb != 31)
616                 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
617             else
618                 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
619         }
620         break;
621     case 0x0A:
622         /* LDBU */
623         if (!(ctx->amask & AMASK_BWX))
624             goto invalid_opc;
625         gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
626         break;
627     case 0x0B:
628         /* LDQ_U */
629         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
630         break;
631     case 0x0C:
632         /* LDWU */
633         if (!(ctx->amask & AMASK_BWX))
634             goto invalid_opc;
635         gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
636         break;
637     case 0x0D:
638         /* STW */
639         gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
640         break;
641     case 0x0E:
642         /* STB */
643         gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
644         break;
645     case 0x0F:
646         /* STQ_U */
647         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
648         break;
649     case 0x10:
650         switch (fn7) {
651         case 0x00:
652             /* ADDL */
653             if (likely(rc != 31)) {
654                 if (ra != 31) {
655                     if (islit) {
656                         tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
657                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
658                     } else {
659                         tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
660                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
661                     }
662                 } else {
663                     if (islit)
664                         tcg_gen_movi_i64(cpu_ir[rc], lit);
665                     else
666                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
667                 }
668             }
669             break;
670         case 0x02:
671             /* S4ADDL */
672             if (likely(rc != 31)) {
673                 if (ra != 31) {
674                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
675                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
676                     if (islit)
677                         tcg_gen_addi_i64(tmp, tmp, lit);
678                     else
679                         tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
680                     tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
681                     tcg_temp_free(tmp);
682                 } else {
683                     if (islit)
684                         tcg_gen_movi_i64(cpu_ir[rc], lit);
685                     else
686                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
687                 }
688             }
689             break;
690         case 0x09:
691             /* SUBL */
692             if (likely(rc != 31)) {
693                 if (ra != 31) {
694                     if (islit)
695                         tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
696                     else
697                         tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
698                     tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
699                 } else {
700                     if (islit)
701                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
702                     else {
703                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
704                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                        }
705                 }
706             }
707             break;
708         case 0x0B:
709             /* S4SUBL */
710             if (likely(rc != 31)) {
711                 if (ra != 31) {
712                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
713                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
714                     if (islit)
715                         tcg_gen_subi_i64(tmp, tmp, lit);
716                     else
717                         tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
718                     tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
719                     tcg_temp_free(tmp);
720                 } else {
721                     if (islit)
722                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
723                     else {
724                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
725                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
726                     }
727                 }
728             }
729             break;
730         case 0x0F:
731             /* CMPBGE */
732             gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
733             break;
734         case 0x12:
735             /* S8ADDL */
736             if (likely(rc != 31)) {
737                 if (ra != 31) {
738                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
739                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
740                     if (islit)
741                         tcg_gen_addi_i64(tmp, tmp, lit);
742                     else
743                         tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
744                     tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
745                     tcg_temp_free(tmp);
746                 } else {
747                     if (islit)
748                         tcg_gen_movi_i64(cpu_ir[rc], lit);
749                     else
750                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
751                 }
752             }
753             break;
754         case 0x1B:
755             /* S8SUBL */
756             if (likely(rc != 31)) {
757                 if (ra != 31) {
758                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
759                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
760                     if (islit)
761                         tcg_gen_subi_i64(tmp, tmp, lit);
762                     else
763                         tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
764                     tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
765                     tcg_temp_free(tmp);
766                 } else {
767                     if (islit)
768                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
769                     else {
770                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
771                         tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
772                     }
773                 }
774             }
775             break;
776         case 0x1D:
777             /* CMPULT */
778             gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
779             break;
780         case 0x20:
781             /* ADDQ */
782             if (likely(rc != 31)) {
783                 if (ra != 31) {
784                     if (islit)
785                         tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
786                     else
787                         tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
788                 } else {
789                     if (islit)
790                         tcg_gen_movi_i64(cpu_ir[rc], lit);
791                     else
792                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
793                 }
794             }
795             break;
796         case 0x22:
797             /* S4ADDQ */
798             if (likely(rc != 31)) {
799                 if (ra != 31) {
800                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
801                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
802                     if (islit)
803                         tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
804                     else
805                         tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
806                     tcg_temp_free(tmp);
807                 } else {
808                     if (islit)
809                         tcg_gen_movi_i64(cpu_ir[rc], lit);
810                     else
811                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
812                 }
813             }
814             break;
815         case 0x29:
816             /* SUBQ */
817             if (likely(rc != 31)) {
818                 if (ra != 31) {
819                     if (islit)
820                         tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
821                     else
822                         tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
823                 } else {
824                     if (islit)
825                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
826                     else
827                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
828                 }
829             }
830             break;
831         case 0x2B:
832             /* S4SUBQ */
833             if (likely(rc != 31)) {
834                 if (ra != 31) {
835                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
836                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
837                     if (islit)
838                         tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
839                     else
840                         tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
841                     tcg_temp_free(tmp);
842                 } else {
843                     if (islit)
844                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
845                     else
846                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
847                 }
848             }
849             break;
850         case 0x2D:
851             /* CMPEQ */
852             gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
853             break;
854         case 0x32:
855             /* S8ADDQ */
856             if (likely(rc != 31)) {
857                 if (ra != 31) {
858                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
859                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
860                     if (islit)
861                         tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
862                     else
863                         tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
864                     tcg_temp_free(tmp);
865                 } else {
866                     if (islit)
867                         tcg_gen_movi_i64(cpu_ir[rc], lit);
868                     else
869                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
870                 }
871             }
872             break;
873         case 0x3B:
874             /* S8SUBQ */
875             if (likely(rc != 31)) {
876                 if (ra != 31) {
877                     TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
878                     tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
879                     if (islit)
880                         tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
881                     else
882                         tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
883                     tcg_temp_free(tmp);
884                 } else {
885                     if (islit)
886                         tcg_gen_movi_i64(cpu_ir[rc], -lit);
887                     else
888                         tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
889                 }
890             }
891             break;
892         case 0x3D:
893             /* CMPULE */
894             gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
895             break;
896         case 0x40:
897             /* ADDL/V */
898             gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
899             break;
900         case 0x49:
901             /* SUBL/V */
902             gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
903             break;
904         case 0x4D:
905             /* CMPLT */
906             gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
907             break;
908         case 0x60:
909             /* ADDQ/V */
910             gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
911             break;
912         case 0x69:
913             /* SUBQ/V */
914             gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
915             break;
916         case 0x6D:
917             /* CMPLE */
918             gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
919             break;
920         default:
921             goto invalid_opc;
922         }
923         break;
924     case 0x11:
925         switch (fn7) {
926         case 0x00:
927             /* AND */
928             if (likely(rc != 31)) {
929                 if (ra == 31)
930                     tcg_gen_movi_i64(cpu_ir[rc], 0);
931                 else if (islit)
932                     tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
933                 else
934                     tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
935             }
936             break;
937         case 0x08:
938             /* BIC */
939             if (likely(rc != 31)) {
940                 if (ra != 31) {
941                     if (islit)
942                         tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
943                     else {
944                         TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
945                         tcg_gen_not_i64(tmp, cpu_ir[rb]);
946                         tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
947                         tcg_temp_free(tmp);
948                     }
949                 } else
950                     tcg_gen_movi_i64(cpu_ir[rc], 0);
951             }
952             break;
953         case 0x14:
954             /* CMOVLBS */
955             gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
956             break;
957         case 0x16:
958             /* CMOVLBC */
959             gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
960             break;
961         case 0x20:
962             /* BIS */
963             if (likely(rc != 31)) {
964                 if (ra != 31) {
965                     if (islit)
966                         tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
967                     else
968                         tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
969                 } else {
970                     if (islit)
971                         tcg_gen_movi_i64(cpu_ir[rc], lit);
972                     else
973                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
974                 }
975             }
976             break;
977         case 0x24:
978             /* CMOVEQ */
979             gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
980             break;
981         case 0x26:
982             /* CMOVNE */
983             gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
984             break;
985         case 0x28:
986             /* ORNOT */
987             if (likely(rc != 31)) {
988                 if (ra != 31) {
989                     if (islit)
990                         tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
991                     else {
992                         TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
993                         tcg_gen_not_i64(tmp, cpu_ir[rb]);
994                         tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
995                         tcg_temp_free(tmp);
996                     }
997                 } else {
998                     if (islit)
999                         tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1000                     else
1001                         tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1002                 }
1003             }
1004             break;
1005         case 0x40:
1006             /* XOR */
1007             if (likely(rc != 31)) {
1008                 if (ra != 31) {
1009                     if (islit)
1010                         tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1011                     else
1012                         tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1013                 } else {
1014                     if (islit)
1015                         tcg_gen_movi_i64(cpu_ir[rc], lit);
1016                     else
1017                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1018                 }
1019             }
1020             break;
1021         case 0x44:
1022             /* CMOVLT */
1023             gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1024             break;
1025         case 0x46:
1026             /* CMOVGE */
1027             gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1028             break;
1029         case 0x48:
1030             /* EQV */
1031             if (likely(rc != 31)) {
1032                 if (ra != 31) {
1033                     if (islit)
1034                         tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1035                     else {
1036                         TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1037                         tcg_gen_not_i64(tmp, cpu_ir[rb]);
1038                         tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1039                         tcg_temp_free(tmp);
1040                     }
1041                 } else {
1042                     if (islit)
1043                         tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1044                     else
1045                         tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1046                 }
1047             }
1048             break;
1049         case 0x61:
1050             /* AMASK */
1051             if (likely(rc != 31)) {
1052                 if (islit)
1053                     tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1054                 else
1055                     tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1056             }
1057             break;
1058         case 0x64:
1059             /* CMOVLE */
1060             gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1061             break;
1062         case 0x66:
1063             /* CMOVGT */
1064             gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1065             break;
1066         case 0x6C:
1067             /* IMPLVER */
1068             if (rc != 31)
1069                 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
1070             break;
1071         default:
1072             goto invalid_opc;
1073         }
1074         break;
1075     case 0x12:
1076         switch (fn7) {
1077         case 0x02:
1078             /* MSKBL */
1079             gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
1080             break;
1081         case 0x06:
1082             /* EXTBL */
1083             gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1084             break;
1085         case 0x0B:
1086             /* INSBL */
1087             gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
1088             break;
1089         case 0x12:
1090             /* MSKWL */
1091             gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
1092             break;
1093         case 0x16:
1094             /* EXTWL */
1095             gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1096             break;
1097         case 0x1B:
1098             /* INSWL */
1099             gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
1100             break;
1101         case 0x22:
1102             /* MSKLL */
1103             gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
1104             break;
1105         case 0x26:
1106             /* EXTLL */
1107             gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1108             break;
1109         case 0x2B:
1110             /* INSLL */
1111             gen_arith3(helper_insll, ra, rb, rc, islit, lit);
1112             break;
1113         case 0x30:
1114             /* ZAP */
1115             gen_arith3(helper_zap, ra, rb, rc, islit, lit);
1116             break;
1117         case 0x31:
1118             /* ZAPNOT */
1119             gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
1120             break;
1121         case 0x32:
1122             /* MSKQL */
1123             gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
1124             break;
1125         case 0x34:
1126             /* SRL */
1127             if (likely(rc != 31)) {
1128                 if (ra != 31) {
1129                     if (islit)
1130                         tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1131                     else {
1132                         TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1133                         tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1134                         tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1135                         tcg_temp_free(shift);
1136                     }
1137                 } else
1138                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1139             }
1140             break;
1141         case 0x36:
1142             /* EXTQL */
1143             gen_ext_l(NULL, ra, rb, rc, islit, lit);
1144             break;
1145         case 0x39:
1146             /* SLL */
1147             if (likely(rc != 31)) {
1148                 if (ra != 31) {
1149                     if (islit)
1150                         tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1151                     else {
1152                         TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1153                         tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1154                         tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1155                         tcg_temp_free(shift);
1156                     }
1157                 } else
1158                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1159             }
1160             break;
1161         case 0x3B:
1162             /* INSQL */
1163             gen_arith3(helper_insql, ra, rb, rc, islit, lit);
1164             break;
1165         case 0x3C:
1166             /* SRA */
1167             if (likely(rc != 31)) {
1168                 if (ra != 31) {
1169                     if (islit)
1170                         tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1171                     else {
1172                         TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1173                         tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1174                         tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1175                         tcg_temp_free(shift);
1176                     }
1177                 } else
1178                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1179             }
1180             break;
1181         case 0x52:
1182             /* MSKWH */
1183             gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
1184             break;
1185         case 0x57:
1186             /* INSWH */
1187             gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
1188             break;
1189         case 0x5A:
1190             /* EXTWH */
1191             gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1192             break;
1193         case 0x62:
1194             /* MSKLH */
1195             gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
1196             break;
1197         case 0x67:
1198             /* INSLH */
1199             gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
1200             break;
1201         case 0x6A:
1202             /* EXTLH */
1203             gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1204             break;
1205         case 0x72:
1206             /* MSKQH */
1207             gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
1208             break;
1209         case 0x77:
1210             /* INSQH */
1211             gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
1212             break;
1213         case 0x7A:
1214             /* EXTQH */
1215             gen_ext_h(NULL, ra, rb, rc, islit, lit);
1216             break;
1217         default:
1218             goto invalid_opc;
1219         }
1220         break;
1221     case 0x13:
1222         switch (fn7) {
1223         case 0x00:
1224             /* MULL */
1225             if (likely(rc != 31)) {
1226                 if (ra == 31)
1227                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1228                 else {
1229                     if (islit)
1230                         tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1231                     else
1232                         tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1233                     tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1234                 }
1235             }
1236             break;
1237         case 0x20:
1238             /* MULQ */
1239             if (likely(rc != 31)) {
1240                 if (ra == 31)
1241                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1242                 else if (islit)
1243                     tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1244                 else
1245                     tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1246             }
1247             break;
1248         case 0x30:
1249             /* UMULH */
1250             gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
1251             break;
1252         case 0x40:
1253             /* MULL/V */
1254             gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
1255             break;
1256         case 0x60:
1257             /* MULQ/V */
1258             gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
1259             break;
1260         default:
1261             goto invalid_opc;
1262         }
1263         break;
1264     case 0x14:
1265         switch (fpfn) { /* f11 & 0x3F */
1266         case 0x04:
1267             /* ITOFS */
1268             if (!(ctx->amask & AMASK_FIX))
1269                 goto invalid_opc;
1270             if (likely(rc != 31)) {
1271                 if (ra != 31) {
1272                     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1273                     tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1274                     tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
1275                     tcg_temp_free(tmp);
1276                 } else
1277                     tcg_gen_movi_i64(cpu_fir[rc], 0);
1278             }
1279             break;
1280         case 0x0A:
1281             /* SQRTF */
1282             if (!(ctx->amask & AMASK_FIX))
1283                 goto invalid_opc;
1284             gen_farith2(&helper_sqrtf, rb, rc);
1285             break;
1286         case 0x0B:
1287             /* SQRTS */
1288             if (!(ctx->amask & AMASK_FIX))
1289                 goto invalid_opc;
1290             gen_farith2(&helper_sqrts, rb, rc);
1291             break;
1292         case 0x14:
1293             /* ITOFF */
1294             if (!(ctx->amask & AMASK_FIX))
1295                 goto invalid_opc;
1296             if (likely(rc != 31)) {
1297                 if (ra != 31) {
1298                     TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1299                     tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1300                     tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
1301                     tcg_temp_free(tmp);
1302                 } else
1303                     tcg_gen_movi_i64(cpu_fir[rc], 0);
1304             }
1305             break;
1306         case 0x24:
1307             /* ITOFT */
1308             if (!(ctx->amask & AMASK_FIX))
1309                 goto invalid_opc;
1310             if (likely(rc != 31)) {
1311                 if (ra != 31)
1312                     tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1313                 else
1314                     tcg_gen_movi_i64(cpu_fir[rc], 0);
1315             }
1316             break;
1317         case 0x2A:
1318             /* SQRTG */
1319             if (!(ctx->amask & AMASK_FIX))
1320                 goto invalid_opc;
1321             gen_farith2(&helper_sqrtg, rb, rc);
1322             break;
1323         case 0x2B:
1324             /* SQRTT */
1325             if (!(ctx->amask & AMASK_FIX))
1326                 goto invalid_opc;
1327             gen_farith2(&helper_sqrtt, rb, rc);
1328             break;
1329         default:
1330             goto invalid_opc;
1331         }
1332         break;
1333     case 0x15:
1334         /* VAX floating point */
1335         /* XXX: rounding mode and trap are ignored (!) */
1336         switch (fpfn) { /* f11 & 0x3F */
1337         case 0x00:
1338             /* ADDF */
1339             gen_farith3(&helper_addf, ra, rb, rc);
1340             break;
1341         case 0x01:
1342             /* SUBF */
1343             gen_farith3(&helper_subf, ra, rb, rc);
1344             break;
1345         case 0x02:
1346             /* MULF */
1347             gen_farith3(&helper_mulf, ra, rb, rc);
1348             break;
1349         case 0x03:
1350             /* DIVF */
1351             gen_farith3(&helper_divf, ra, rb, rc);
1352             break;
1353         case 0x1E:
1354             /* CVTDG */
1355 #if 0 // TODO
1356             gen_farith2(&helper_cvtdg, rb, rc);
1357 #else
1358             goto invalid_opc;
1359 #endif
1360             break;
1361         case 0x20:
1362             /* ADDG */
1363             gen_farith3(&helper_addg, ra, rb, rc);
1364             break;
1365         case 0x21:
1366             /* SUBG */
1367             gen_farith3(&helper_subg, ra, rb, rc);
1368             break;
1369         case 0x22:
1370             /* MULG */
1371             gen_farith3(&helper_mulg, ra, rb, rc);
1372             break;
1373         case 0x23:
1374             /* DIVG */
1375             gen_farith3(&helper_divg, ra, rb, rc);
1376             break;
1377         case 0x25:
1378             /* CMPGEQ */
1379             gen_farith3(&helper_cmpgeq, ra, rb, rc);
1380             break;
1381         case 0x26:
1382             /* CMPGLT */
1383             gen_farith3(&helper_cmpglt, ra, rb, rc);
1384             break;
1385         case 0x27:
1386             /* CMPGLE */
1387             gen_farith3(&helper_cmpgle, ra, rb, rc);
1388             break;
1389         case 0x2C:
1390             /* CVTGF */
1391             gen_farith2(&helper_cvtgf, rb, rc);
1392             break;
1393         case 0x2D:
1394             /* CVTGD */
1395 #if 0 // TODO
1396             gen_farith2(&helper_cvtgd, rb, rc);
1397 #else
1398             goto invalid_opc;
1399 #endif
1400             break;
1401         case 0x2F:
1402             /* CVTGQ */
1403             gen_farith2(&helper_cvtgq, rb, rc);
1404             break;
1405         case 0x3C:
1406             /* CVTQF */
1407             gen_farith2(&helper_cvtqf, rb, rc);
1408             break;
1409         case 0x3E:
1410             /* CVTQG */
1411             gen_farith2(&helper_cvtqg, rb, rc);
1412             break;
1413         default:
1414             goto invalid_opc;
1415         }
1416         break;
1417     case 0x16:
1418         /* IEEE floating-point */
1419         /* XXX: rounding mode and traps are ignored (!) */
1420         switch (fpfn) { /* f11 & 0x3F */
1421         case 0x00:
1422             /* ADDS */
1423             gen_farith3(&helper_adds, ra, rb, rc);
1424             break;
1425         case 0x01:
1426             /* SUBS */
1427             gen_farith3(&helper_subs, ra, rb, rc);
1428             break;
1429         case 0x02:
1430             /* MULS */
1431             gen_farith3(&helper_muls, ra, rb, rc);
1432             break;
1433         case 0x03:
1434             /* DIVS */
1435             gen_farith3(&helper_divs, ra, rb, rc);
1436             break;
1437         case 0x20:
1438             /* ADDT */
1439             gen_farith3(&helper_addt, ra, rb, rc);
1440             break;
1441         case 0x21:
1442             /* SUBT */
1443             gen_farith3(&helper_subt, ra, rb, rc);
1444             break;
1445         case 0x22:
1446             /* MULT */
1447             gen_farith3(&helper_mult, ra, rb, rc);
1448             break;
1449         case 0x23:
1450             /* DIVT */
1451             gen_farith3(&helper_divt, ra, rb, rc);
1452             break;
1453         case 0x24:
1454             /* CMPTUN */
1455             gen_farith3(&helper_cmptun, ra, rb, rc);
1456             break;
1457         case 0x25:
1458             /* CMPTEQ */
1459             gen_farith3(&helper_cmpteq, ra, rb, rc);
1460             break;
1461         case 0x26:
1462             /* CMPTLT */
1463             gen_farith3(&helper_cmptlt, ra, rb, rc);
1464             break;
1465         case 0x27:
1466             /* CMPTLE */
1467             gen_farith3(&helper_cmptle, ra, rb, rc);
1468             break;
1469         case 0x2C:
1470             /* XXX: incorrect */
1471             if (fn11 == 0x2AC) {
1472                 /* CVTST */
1473                 gen_farith2(&helper_cvtst, rb, rc);
1474             } else {
1475                 /* CVTTS */
1476                 gen_farith2(&helper_cvtts, rb, rc);
1477             }
1478             break;
1479         case 0x2F:
1480             /* CVTTQ */
1481             gen_farith2(&helper_cvttq, rb, rc);
1482             break;
1483         case 0x3C:
1484             /* CVTQS */
1485             gen_farith2(&helper_cvtqs, rb, rc);
1486             break;
1487         case 0x3E:
1488             /* CVTQT */
1489             gen_farith2(&helper_cvtqt, rb, rc);
1490             break;
1491         default:
1492             goto invalid_opc;
1493         }
1494         break;
1495     case 0x17:
1496         switch (fn11) {
1497         case 0x010:
1498             /* CVTLQ */
1499             gen_farith2(&helper_cvtlq, rb, rc);
1500             break;
1501         case 0x020:
1502             if (likely(rc != 31)) {
1503                 if (ra == rb)
1504                     /* FMOV */
1505                     tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1506                 else
1507                     /* CPYS */
1508                     gen_farith3(&helper_cpys, ra, rb, rc);
1509             }
1510             break;
1511         case 0x021:
1512             /* CPYSN */
1513             gen_farith3(&helper_cpysn, ra, rb, rc);
1514             break;
1515         case 0x022:
1516             /* CPYSE */
1517             gen_farith3(&helper_cpyse, ra, rb, rc);
1518             break;
1519         case 0x024:
1520             /* MT_FPCR */
1521             if (likely(ra != 31))
1522                 tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
1523             else {
1524                 TCGv tmp = tcg_const_i64(0);
1525                 tcg_gen_helper_0_1(helper_store_fpcr, tmp);
1526                 tcg_temp_free(tmp);
1527             }
1528             break;
1529         case 0x025:
1530             /* MF_FPCR */
1531             if (likely(ra != 31))
1532                 tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
1533             break;
1534         case 0x02A:
1535             /* FCMOVEQ */
1536             gen_fcmov(&helper_cmpfeq, ra, rb, rc);
1537             break;
1538         case 0x02B:
1539             /* FCMOVNE */
1540             gen_fcmov(&helper_cmpfne, ra, rb, rc);
1541             break;
1542         case 0x02C:
1543             /* FCMOVLT */
1544             gen_fcmov(&helper_cmpflt, ra, rb, rc);
1545             break;
1546         case 0x02D:
1547             /* FCMOVGE */
1548             gen_fcmov(&helper_cmpfge, ra, rb, rc);
1549             break;
1550         case 0x02E:
1551             /* FCMOVLE */
1552             gen_fcmov(&helper_cmpfle, ra, rb, rc);
1553             break;
1554         case 0x02F:
1555             /* FCMOVGT */
1556             gen_fcmov(&helper_cmpfgt, ra, rb, rc);
1557             break;
1558         case 0x030:
1559             /* CVTQL */
1560             gen_farith2(&helper_cvtql, rb, rc);
1561             break;
1562         case 0x130:
1563             /* CVTQL/V */
1564             gen_farith2(&helper_cvtqlv, rb, rc);
1565             break;
1566         case 0x530:
1567             /* CVTQL/SV */
1568             gen_farith2(&helper_cvtqlsv, rb, rc);
1569             break;
1570         default:
1571             goto invalid_opc;
1572         }
1573         break;
1574     case 0x18:
1575         switch ((uint16_t)disp16) {
1576         case 0x0000:
1577             /* TRAPB */
1578             /* No-op. Just exit from the current tb */
1579             ret = 2;
1580             break;
1581         case 0x0400:
1582             /* EXCB */
1583             /* No-op. Just exit from the current tb */
1584             ret = 2;
1585             break;
1586         case 0x4000:
1587             /* MB */
1588             /* No-op */
1589             break;
1590         case 0x4400:
1591             /* WMB */
1592             /* No-op */
1593             break;
1594         case 0x8000:
1595             /* FETCH */
1596             /* No-op */
1597             break;
1598         case 0xA000:
1599             /* FETCH_M */
1600             /* No-op */
1601             break;
1602         case 0xC000:
1603             /* RPCC */
1604             if (ra != 31)
1605                 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
1606             break;
1607         case 0xE000:
1608             /* RC */
1609             if (ra != 31)
1610                 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1611             break;
1612         case 0xE800:
1613             /* ECB */
1614             /* XXX: TODO: evict tb cache at address rb */
1615 #if 0
1616             ret = 2;
1617 #else
1618             goto invalid_opc;
1619 #endif
1620             break;
1621         case 0xF000:
1622             /* RS */
1623             if (ra != 31)
1624                 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1625             break;
1626         case 0xF800:
1627             /* WH64 */
1628             /* No-op */
1629             break;
1630         default:
1631             goto invalid_opc;
1632         }
1633         break;
1634     case 0x19:
1635         /* HW_MFPR (PALcode) */
1636 #if defined (CONFIG_USER_ONLY)
1637         goto invalid_opc;
1638 #else
1639         if (!ctx->pal_mode)
1640             goto invalid_opc;
1641         if (ra != 31) {
1642             TCGv tmp = tcg_const_i32(insn & 0xFF);
1643             tcg_gen_helper_1_2(helper_mfpr, cpu_ir[ra], tmp, cpu_ir[ra]);
1644             tcg_temp_free(tmp);
1645         }
1646         break;
1647 #endif
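    /* Opcode 0x1A: JMP/JSR/RET/JSR_COROUTINE.  ra receives the return
       address and the new PC is taken from rb with its low two bits
       cleared.  */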
1648     case 0x1A:
1649         if (ra != 31)
1650             tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1651         if (rb != 31)
1652             tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1653         else
1654             tcg_gen_movi_i64(cpu_pc, 0);
1655         /* These four jumps differ only in the branch prediction hint */
1656         switch (fn2) {
1657         case 0x0:
1658             /* JMP */
1659             break;
1660         case 0x1:
1661             /* JSR */
1662             break;
1663         case 0x2:
1664             /* RET */
1665             break;
1666         case 0x3:
1667             /* JSR_COROUTINE */
1668             break;
1669         }
1670         ret = 1;
1671         break;
1672     case 0x1B:
1673         /* HW_LD (PALcode) */
1674 #if defined (CONFIG_USER_ONLY)
1675         goto invalid_opc;
1676 #else
1677         if (!ctx->pal_mode)
1678             goto invalid_opc;
1679         if (ra != 31) {
1680             TCGv addr = tcg_temp_new(TCG_TYPE_I64);
1681             if (rb != 31)
1682                 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1683             else
1684                 tcg_gen_movi_i64(addr, disp12);
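            /* Bits <15:12> of the insn select the HW_LD access type:
               physical vs. virtual, locked, alternate-mode and
               protection-checked variants.  */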
1685             switch ((insn >> 12) & 0xF) {
1686             case 0x0:
1687                 /* Longword physical access */
1688                 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1689                 break;
1690             case 0x1:
1691                 /* Quadword physical access */
1692                 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1693                 break;
1694             case 0x2:
1695                 /* Longword physical access with lock */
1696                 tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
1697                 break;
1698             case 0x3:
1699                 /* Quadword physical access with lock */
1700                 tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
1701                 break;
1702             case 0x4:
1703                 /* Longword virtual PTE fetch */
1704                 tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
1705                 break;
1706             case 0x5:
1707                 /* Quadword virtual PTE fetch */
1708                 tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
1709                 break;
1710             case 0x6:
1711                 /* Invalid */
1712                 goto invalid_opc;
1713             case 0x7:
1714                 /* Invalid */
1715                 goto invalid_opc;
1716             case 0x8:
1717                 /* Longword virtual access */
1718                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1719                 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1720                 break;
1721             case 0x9:
1722                 /* Quadword virtual access */
1723                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1724                 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1725                 break;
1726             case 0xA:
1727                 /* Longword virtual access with protection check */
1728                 tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
1729                 break;
1730             case 0xB:
1731                 /* Quadword virtual access with protection check */
1732                 tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
1733                 break;
1734             case 0xC:
1735                 /* Longword virtual access with alternate access mode */
1736                 tcg_gen_helper_0_0(helper_set_alt_mode);
1737                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1738                 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1739                 tcg_gen_helper_0_0(helper_restore_mode);
1740                 break;
1741             case 0xD:
1742                 /* Quadword virtual access with alternate access mode */
1743                 tcg_gen_helper_0_0(helper_set_alt_mode);
1744                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1745                 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1746                 tcg_gen_helper_0_0(helper_restore_mode);
1747                 break;
1748             case 0xE:
1749                 /* Longword virtual access with alternate access mode and
1750                  * protection checks
1751                  */
1752                 tcg_gen_helper_0_0(helper_set_alt_mode);
1753                 tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
1754                 tcg_gen_helper_0_0(helper_restore_mode);
1755                 break;
1756             case 0xF:
1757                 /* Quadword virtual access with alternate access mode and
1758                  * protection checks
1759                  */
1760                 tcg_gen_helper_0_0(helper_set_alt_mode);
1761                 tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
1762                 tcg_gen_helper_0_0(helper_restore_mode);
1763                 break;
1764             }
1765             tcg_temp_free(addr);
1766         }
1767         break;
1768 #endif
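    /* Opcode 0x1C: BWX/CIX/MVI/FIX extension instructions, dispatched on
       fn7 and gated by the CPU's AMASK bits.  */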
1769     case 0x1C:
1770         switch (fn7) {
1771         case 0x00:
1772             /* SEXTB */
1773             if (!(ctx->amask & AMASK_BWX))
1774                 goto invalid_opc;
1775             if (likely(rc != 31)) {
1776                 if (islit)
1777                     tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1778                 else
1779                     tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1780             }
1781             break;
1782         case 0x01:
1783             /* SEXTW */
1784             if (!(ctx->amask & AMASK_BWX))
1785                 goto invalid_opc;
1786             if (likely(rc != 31)) {
1787                 if (islit)
1788                     tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1789                 else
1790                     tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1791             }
1792             break;
1793         case 0x30:
1794             /* CTPOP */
1795             if (!(ctx->amask & AMASK_CIX))
1796                 goto invalid_opc;
1797             if (likely(rc != 31)) {
1798                 if (islit)
1799                     tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1800                 else
1801                     tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1802             }
1803             break;
1804         case 0x31:
1805             /* PERR */
1806             if (!(ctx->amask & AMASK_MVI))
1807                 goto invalid_opc;
1808             /* XXX: TODO */
1809             goto invalid_opc;
1810             break;
1811         case 0x32:
1812             /* CTLZ */
1813             if (!(ctx->amask & AMASK_CIX))
1814                 goto invalid_opc;
1815             if (likely(rc != 31)) {
1816                 if (islit)
1817                     tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1818                 else
1819                     tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1820             }
1821             break;
1822         case 0x33:
1823             /* CTTZ */
1824             if (!(ctx->amask & AMASK_CIX))
1825                 goto invalid_opc;
1826             if (likely(rc != 31)) {
1827                 if (islit)
1828                     tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1829                 else
1830                     tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1831             }
1832             break;
1833         case 0x34:
1834             /* UNPKBW */
1835             if (!(ctx->amask & AMASK_MVI))
1836                 goto invalid_opc;
1837             /* XXX: TODO */
1838             goto invalid_opc;
1839             break;
1840         case 0x35:
1841             /* UNPKWL */
1842             if (!(ctx->amask & AMASK_MVI))
1843                 goto invalid_opc;
1844             /* XXX: TODO */
1845             goto invalid_opc;
1846             break;
1847         case 0x36:
1848             /* PKWB */
1849             if (!(ctx->amask & AMASK_MVI))
1850                 goto invalid_opc;
1851             /* XXX: TODO */
1852             goto invalid_opc;
1853             break;
1854         case 0x37:
1855             /* PKLB */
1856             if (!(ctx->amask & AMASK_MVI))
1857                 goto invalid_opc;
1858             /* XXX: TODO */
1859             goto invalid_opc;
1860             break;
1861         case 0x38:
1862             /* MINSB8 */
1863             if (!(ctx->amask & AMASK_MVI))
1864                 goto invalid_opc;
1865             /* XXX: TODO */
1866             goto invalid_opc;
1867             break;
1868         case 0x39:
1869             /* MINSW4 */
1870             if (!(ctx->amask & AMASK_MVI))
1871                 goto invalid_opc;
1872             /* XXX: TODO */
1873             goto invalid_opc;
1874             break;
1875         case 0x3A:
1876             /* MINUB8 */
1877             if (!(ctx->amask & AMASK_MVI))
1878                 goto invalid_opc;
1879             /* XXX: TODO */
1880             goto invalid_opc;
1881             break;
1882         case 0x3B:
1883             /* MINUW4 */
1884             if (!(ctx->amask & AMASK_MVI))
1885                 goto invalid_opc;
1886             /* XXX: TODO */
1887             goto invalid_opc;
1888             break;
1889         case 0x3C:
1890             /* MAXUB8 */
1891             if (!(ctx->amask & AMASK_MVI))
1892                 goto invalid_opc;
1893             /* XXX: TODO */
1894             goto invalid_opc;
1895             break;
1896         case 0x3D:
1897             /* MAXUW4 */
1898             if (!(ctx->amask & AMASK_MVI))
1899                 goto invalid_opc;
1900             /* XXX: TODO */
1901             goto invalid_opc;
1902             break;
1903         case 0x3E:
1904             /* MAXSB8 */
1905             if (!(ctx->amask & AMASK_MVI))
1906                 goto invalid_opc;
1907             /* XXX: TODO */
1908             goto invalid_opc;
1909             break;
1910         case 0x3F:
1911             /* MAXSW4 */
1912             if (!(ctx->amask & AMASK_MVI))
1913                 goto invalid_opc;
1914             /* XXX: TODO */
1915             goto invalid_opc;
1916             break;
1917         case 0x70:
1918             /* FTOIT */
1919             if (!(ctx->amask & AMASK_FIX))
1920                 goto invalid_opc;
1921             if (likely(rc != 31)) {
1922                 if (ra != 31)
1923                     tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
1924                 else
1925                     tcg_gen_movi_i64(cpu_ir[rc], 0);
1926             }
1927             break;
1928         case 0x78:
1929             /* FTOIS */
1930             if (!(ctx->amask & AMASK_FIX))
1931                 goto invalid_opc;
1932             if (rc != 31) {
1933                 TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
1934                 if (ra != 31)
1935                     tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
1936                 else {
1937                     TCGv tmp2 = tcg_const_i64(0);
1938                     tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
1939                     tcg_temp_free(tmp2);
1940                 }
1941                 tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
1942                 tcg_temp_free(tmp1);
1943             }
1944             break;
1945         default:
1946             goto invalid_opc;
1947         }
1948         break;
1949     case 0x1D:
1950         /* HW_MTPR (PALcode) */
1951 #if defined (CONFIG_USER_ONLY)
1952         goto invalid_opc;
1953 #else
1954         if (!ctx->pal_mode)
1955             goto invalid_opc;
1956         else {
1957             TCGv tmp1 = tcg_const_i32(insn & 0xFF);
1958             if (ra != 31)
1959                 tcg_gen_helper_0_2(helper_mtpr, tmp1, cpu_ir[ra]);
1960             else {
1961                 TCGv tmp2 = tcg_const_i64(0);
1962                 tcg_gen_helper_0_2(helper_mtpr, tmp1, tmp2);
1963                 tcg_temp_free(tmp2);
1964             }
1965             tcg_temp_free(tmp1);
1966             ret = 2;
1967         }
1968         break;
1969 #endif
1970     case 0x1E:
1971         /* HW_REI (PALcode) */
1972 #if defined (CONFIG_USER_ONLY)
1973         goto invalid_opc;
1974 #else
1975         if (!ctx->pal_mode)
1976             goto invalid_opc;
1977         if (rb == 31) {
1978             /* "Old" alpha */
1979             tcg_gen_helper_0_0(helper_hw_rei);
1980         } else {
1981             TCGv tmp;
1982
1983             /* The return address is rb plus the sign-extended 13-bit
1984                displacement held in the low bits of the insn; ra is not
1985                used by HW_RET.  */
1986             tmp = tcg_temp_new(TCG_TYPE_I64);
1987             tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
1988             tcg_gen_helper_0_1(helper_hw_ret, tmp);
1989             tcg_temp_free(tmp);
1990         }
1991         ret = 2;
1992         break;
1993 #endif
1994     case 0x1F:
1995         /* HW_ST (PALcode) */
1996 #if defined (CONFIG_USER_ONLY)
1997         goto invalid_opc;
1998 #else
1999         if (!ctx->pal_mode)
2000             goto invalid_opc;
2001         else {
2002             TCGv addr, val;
2003             addr = tcg_temp_new(TCG_TYPE_I64);
2004             if (rb != 31)
2005                 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2006             else
2007                 tcg_gen_movi_i64(addr, disp12);
2008             if (ra != 31)
2009                 val = cpu_ir[ra];
2010             else {
2011                 val = tcg_temp_new(TCG_TYPE_I64);
2012                 tcg_gen_movi_i64(val, 0);
2013             }
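            /* Bits <15:12> of the insn select the HW_ST access type,
               mirroring the HW_LD encoding; only the physical (with or
               without lock), virtual and alternate-mode forms are valid
               for stores.  */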
2014             switch ((insn >> 12) & 0xF) {
2015             case 0x0:
2016                 /* Longword physical access */
2017                 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2018                 break;
2019             case 0x1:
2020                 /* Quadword physical access */
2021                 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2022                 break;
2023             case 0x2:
2024                 /* Longword physical access with lock */
2025                 tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
2026                 break;
2027             case 0x3:
2028                 /* Quadword physical access with lock */
2029                 tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
2030                 break;
2031             case 0x4:
2032                 /* Longword virtual access */
2033                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2034                 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2035                 break;
2036             case 0x5:
2037                 /* Quadword virtual access */
2038                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2039                 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2040                 break;
2041             case 0x6:
2042                 /* Invalid */
2043                 goto invalid_opc;
2044             case 0x7:
2045                 /* Invalid */
2046                 goto invalid_opc;
2047             case 0x8:
2048                 /* Invalid */
2049                 goto invalid_opc;
2050             case 0x9:
2051                 /* Invalid */
2052                 goto invalid_opc;
2053             case 0xA:
2054                 /* Invalid */
2055                 goto invalid_opc;
2056             case 0xB:
2057                 /* Invalid */
2058                 goto invalid_opc;
2059             case 0xC:
2060                 /* Longword virtual access with alternate access mode */
2061                 tcg_gen_helper_0_0(helper_set_alt_mode);
2062                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2063                 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2064                 tcg_gen_helper_0_0(helper_restore_mode);
2065                 break;
2066             case 0xD:
2067                 /* Quadword virtual access with alternate access mode */
2068                 tcg_gen_helper_0_0(helper_set_alt_mode);
2069                 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2070                 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2071                 tcg_gen_helper_0_0(helper_restore_mode);
2072                 break;
2073             case 0xE:
2074                 /* Invalid */
2075                 goto invalid_opc;
2076             case 0xF:
2077                 /* Invalid */
2078                 goto invalid_opc;
2079             }
2080             if (ra == 31)
2081                 tcg_temp_free(val);
2082             tcg_temp_free(addr);
2083         }
2084         ret = 2;
2085         break;
2086 #endif
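    /* Opcodes 0x20-0x2F: floating-point and integer loads and stores,
       all funnelled through gen_load_mem / gen_store_mem.  */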
2087     case 0x20:
2088         /* LDF */
2089         gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2090         break;
2091     case 0x21:
2092         /* LDG */
2093         gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2094         break;
2095     case 0x22:
2096         /* LDS */
2097         gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2098         break;
2099     case 0x23:
2100         /* LDT */
2101         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2102         break;
2103     case 0x24:
2104         /* STF */
2105         gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2106         break;
2107     case 0x25:
2108         /* STG */
2109         gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2110         break;
2111     case 0x26:
2112         /* STS */
2113         gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2114         break;
2115     case 0x27:
2116         /* STT */
2117         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2118         break;
2119     case 0x28:
2120         /* LDL */
2121         gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2122         break;
2123     case 0x29:
2124         /* LDQ */
2125         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2126         break;
2127     case 0x2A:
2128         /* LDL_L */
2129         gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2130         break;
2131     case 0x2B:
2132         /* LDQ_L */
2133         gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2134         break;
2135     case 0x2C:
2136         /* STL */
2137         gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2138         break;
2139     case 0x2D:
2140         /* STQ */
2141         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2142         break;
2143     case 0x2E:
2144         /* STL_C */
2145         gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0);
2146         break;
2147     case 0x2F:
2148         /* STQ_C */
2149         gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0);
2150         break;
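    /* Opcodes 0x30-0x3F: branch formats.  BR and BSR also write the
       return address to ra; every branch terminates the TB (ret = 1).  */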
2151     case 0x30:
2152         /* BR */
2153         if (ra != 31)
2154             tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2155         tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2156         ret = 1;
2157         break;
2158     case 0x31:
2159         /* FBEQ */
2160         gen_fbcond(ctx, &helper_cmpfeq, ra, disp21);
2161         ret = 1;
2162         break;
2163     case 0x32:
2164         /* FBLT */
2165         gen_fbcond(ctx, &helper_cmpflt, ra, disp21);
2166         ret = 1;
2167         break;
2168     case 0x33:
2169         /* FBLE */
2170         gen_fbcond(ctx, &helper_cmpfle, ra, disp21);
2171         ret = 1;
2172         break;
2173     case 0x34:
2174         /* BSR */
2175         if (ra != 31)
2176             tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2177         tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2178         ret = 1;
2179         break;
2180     case 0x35:
2181         /* FBNE */
2182         gen_fbcond(ctx, &helper_cmpfne, ra, disp21);
2183         ret = 1;
2184         break;
2185     case 0x36:
2186         /* FBGE */
2187         gen_fbcond(ctx, &helper_cmpfge, ra, disp21);
2188         ret = 1;
2189         break;
2190     case 0x37:
2191         /* FBGT */
2192         gen_fbcond(ctx, &helper_cmpfgt, ra, disp21);
2193         ret = 1;
2194         break;
2195     case 0x38:
2196         /* BLBC */
2197         gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2198         ret = 1;
2199         break;
2200     case 0x39:
2201         /* BEQ */
2202         gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2203         ret = 1;
2204         break;
2205     case 0x3A:
2206         /* BLT */
2207         gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2208         ret = 1;
2209         break;
2210     case 0x3B:
2211         /* BLE */
2212         gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2213         ret = 1;
2214         break;
2215     case 0x3C:
2216         /* BLBS */
2217         gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2218         ret = 1;
2219         break;
2220     case 0x3D:
2221         /* BNE */
2222         gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2223         ret = 1;
2224         break;
2225     case 0x3E:
2226         /* BGE */
2227         gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2228         ret = 1;
2229         break;
2230     case 0x3F:
2231         /* BGT */
2232         gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2233         ret = 1;
2234         break;
2235     invalid_opc:
2236         gen_invalid(ctx);
2237         ret = 3;
2238         break;
2239     }
2240
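    /* ret == 0: keep translating; 1: cpu_pc was already updated by a
       branch; 2: stop the TB after this insn; 3: an invalid-opcode
       exception was generated.  */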
2241     return ret;
2242 }
2243
2244 static always_inline void gen_intermediate_code_internal (CPUState *env,
2245                                                           TranslationBlock *tb,
2246                                                           int search_pc)
2247 {
2248 #if defined ALPHA_DEBUG_DISAS
2249     static int insn_count;
2250 #endif
2251     DisasContext ctx, *ctxp = &ctx;
2252     target_ulong pc_start;
2253     uint32_t insn;
2254     uint16_t *gen_opc_end;
2255     int j, lj = -1;
2256     int ret;
2257     int num_insns;
2258     int max_insns;
2259
2260     pc_start = tb->pc;
2261     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2262     ctx.pc = pc_start;
2263     ctx.amask = env->amask;
2264 #if defined (CONFIG_USER_ONLY)
2265     ctx.mem_idx = 0;
2266 #else
2267     ctx.mem_idx = ((env->ps >> 3) & 3);
2268     ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
2269 #endif
2270     num_insns = 0;
2271     max_insns = tb->cflags & CF_COUNT_MASK;
2272     if (max_insns == 0)
2273         max_insns = CF_COUNT_MASK;
2274
2275     gen_icount_start();
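    /* Translate one insn at a time until something ends the TB: a branch
       or PALcode insn, a page boundary, single-stepping, or hitting
       max_insns.  */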
2276     for (ret = 0; ret == 0;) {
2277         if (env->nb_breakpoints > 0) {
2278             for(j = 0; j < env->nb_breakpoints; j++) {
2279                 if (env->breakpoints[j] == ctx.pc) {
2280                     gen_excp(&ctx, EXCP_DEBUG, 0);
2281                     break;
2282                 }
2283             }
2284         }
2285         if (search_pc) {
2286             j = gen_opc_ptr - gen_opc_buf;
2287             if (lj < j) {
2288                 lj++;
2289                 while (lj < j)
2290                     gen_opc_instr_start[lj++] = 0;
2291                 gen_opc_pc[lj] = ctx.pc;
2292                 gen_opc_instr_start[lj] = 1;
2293                 gen_opc_icount[lj] = num_insns;
2294             }
2295         }
2296         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2297             gen_io_start();
2298 #if defined ALPHA_DEBUG_DISAS
2299         insn_count++;
2300         if (logfile != NULL) {
2301             fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2302                     ctx.pc, ctx.mem_idx);
2303         }
2304 #endif
2305         insn = ldl_code(ctx.pc);
2306 #if defined ALPHA_DEBUG_DISAS
2307         insn_count++;
2308         if (logfile != NULL) {
2309             fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2310         }
2311 #endif
2312         num_insns++;
2313         ctx.pc += 4;
2314         ret = translate_one(ctxp, insn);
2315         if (ret != 0)
2316             break;
2317         /* if we reach a page boundary or are single stepping, stop
2318          * generation
2319          */
2320         if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2321             (env->singlestep_enabled) || gen_opc_ptr >= gen_opc_end ||
2322             num_insns >= max_insns) {
2323             break;
2324         }
2325 #if defined (DO_SINGLE_STEP)
2326         break;
2327 #endif
2328     }
2329     if (ret != 1 && ret != 3) {
2330         tcg_gen_movi_i64(cpu_pc, ctx.pc);
2331     }
2332 #if defined (DO_TB_FLUSH)
2333     tcg_gen_helper_0_0(helper_tb_flush);
2334 #endif
2335     if (tb->cflags & CF_LAST_IO)
2336         gen_io_end();
2337     /* Generate the return instruction */
2338     tcg_gen_exit_tb(0);
2339     gen_icount_end(tb, num_insns);
2340     *gen_opc_ptr = INDEX_op_end;
2341     if (search_pc) {
2342         j = gen_opc_ptr - gen_opc_buf;
2343         lj++;
2344         while (lj <= j)
2345             gen_opc_instr_start[lj++] = 0;
2346     } else {
2347         tb->size = ctx.pc - pc_start;
2348         tb->icount = num_insns;
2349     }
2350 #if defined ALPHA_DEBUG_DISAS
2351     if (loglevel & CPU_LOG_TB_CPU) {
2352         cpu_dump_state(env, logfile, fprintf, 0);
2353     }
2354     if (loglevel & CPU_LOG_TB_IN_ASM) {
2355         fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2356         target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2357         fprintf(logfile, "\n");
2358     }
2359 #endif
2360 }
2361
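/* The two entry points below differ only in search_pc: the _pc variant
   records per-op PC information so the insn that faulted can be located
   when restoring state.  */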
2362 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2363 {
2364     gen_intermediate_code_internal(env, tb, 0);
2365 }
2366
2367 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2368 {
2369     gen_intermediate_code_internal(env, tb, 1);
2370 }
2371
2372 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2373 {
2374     CPUAlphaState *env;
2375     uint64_t hwpcb;
2376
2377     env = qemu_mallocz(sizeof(CPUAlphaState));
2378     if (!env)
2379         return NULL;
2380     cpu_exec_init(env);
2381     alpha_translate_init();
2382     tlb_flush(env, 1);
2383     /* XXX: should not be hardcoded */
2384     env->implver = IMPLVER_2106x;
2385     env->ps = 0x1F00;
2386 #if defined (CONFIG_USER_ONLY)
2387     env->ps |= 1 << 3;
2388 #endif
2389     pal_init(env);
2390     /* Initialize IPR */
2391     hwpcb = env->ipr[IPR_PCBB];
2392     env->ipr[IPR_ASN] = 0;
2393     env->ipr[IPR_ASTEN] = 0;
2394     env->ipr[IPR_ASTSR] = 0;
2395     env->ipr[IPR_DATFX] = 0;
2396     /* XXX: fix this */
2397     //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2398     //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2399     //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2400     //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2401     env->ipr[IPR_FEN] = 0;
2402     env->ipr[IPR_IPL] = 31;
2403     env->ipr[IPR_MCES] = 0;
2404     env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2405     //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2406     env->ipr[IPR_SISR] = 0;
2407     env->ipr[IPR_VIRBND] = -1ULL;
2408
2409     return env;
2410 }
2411
2412 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2413                 unsigned long searched_pc, int pc_pos, void *puc)
2414 {
2415     env->pc = gen_opc_pc[pc_pos];
2416 }