xref: /qemu/target/alpha/translate.c (revision 0955d66e)
1 /*
2  *  Alpha emulation cpu translation for qemu.
3  *
4  *  Copyright (c) 2007 Jocelyn Mayer
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19 
20 #include "qemu/osdep.h"
21 #include "cpu.h"
22 #include "sysemu/cpus.h"
23 #include "sysemu/cpu-timers.h"
24 #include "disas/disas.h"
25 #include "qemu/host-utils.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 
34 
/* Define ALPHA_DEBUG_DISAS to log each translated insn to the TB_IN_ASM
   stream; left undefined by default.  */
#undef ALPHA_DEBUG_DISAS
/* Store softfloat control fields directly in CPUAlphaState instead of
   calling out-of-line setter helpers (see gen_qual_roundmode et al).  */
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
43 
typedef struct DisasContext DisasContext;
/* Per-TB translation state for the Alpha front end.  */
struct DisasContext {
    DisasContextBase base;

#ifndef CONFIG_USER_ONLY
    /* PALcode base register, for dispatching CALL_PAL in system mode.  */
    uint64_t palbr;
#endif
    /* TB flags captured at TB start.  */
    uint32_t tbflags;
    /* MMU index used for all memory accesses in this TB.  */
    int mem_idx;

    /* implver and amask values for this CPU.  */
    int implver;
    int amask;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;

    /* The set of registers active in the current context.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
};
70 
/* Target-specific return values from translate_one, indicating the
   state of the TB.  Note that DISAS_NEXT indicates that we are not
   exiting the TB.  */
#define DISAS_PC_UPDATED_NOCHAIN  DISAS_TARGET_0
#define DISAS_PC_UPDATED          DISAS_TARGET_1
#define DISAS_PC_STALE            DISAS_TARGET_2

/* global register indexes */
static TCGv cpu_std_ir[31];     /* integer registers $0-$30 */
static TCGv cpu_fir[31];        /* fp registers $f0-$f30 */
static TCGv cpu_pc;
static TCGv cpu_lock_addr;      /* LDx_L/STx_C reservation address */
static TCGv cpu_lock_value;     /* value loaded by the last LDx_L */

#ifndef CONFIG_USER_ONLY
/* Integer register file with the PAL shadow registers overlaid.  */
static TCGv cpu_pal_ir[31];
#endif

#include "exec/gen-icount.h"
90 
/* Register the TCG globals backing the CPUAlphaState fields used by the
   translator: 31 integer and 31 fp registers, the PAL shadow overlay
   (system mode only), plus pc, lock_addr and lock_value.  */
void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    int i;

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    /* The PAL set aliases the standard set, except for the eight shadow
       registers, which overlay t7, s0-s5 (i + 8) and t11 (25).  */
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}
154 
155 static TCGv load_zero(DisasContext *ctx)
156 {
157     if (!ctx->zero) {
158         ctx->zero = tcg_constant_i64(0);
159     }
160     return ctx->zero;
161 }
162 
163 static TCGv dest_sink(DisasContext *ctx)
164 {
165     if (!ctx->sink) {
166         ctx->sink = tcg_temp_new();
167     }
168     return ctx->sink;
169 }
170 
171 static void free_context_temps(DisasContext *ctx)
172 {
173     if (ctx->sink) {
174         tcg_gen_discard_i64(ctx->sink);
175         tcg_temp_free(ctx->sink);
176         ctx->sink = NULL;
177     }
178 }
179 
180 static TCGv load_gpr(DisasContext *ctx, unsigned reg)
181 {
182     if (likely(reg < 31)) {
183         return ctx->ir[reg];
184     } else {
185         return load_zero(ctx);
186     }
187 }
188 
189 static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
190                          uint8_t lit, bool islit)
191 {
192     if (islit) {
193         return tcg_constant_i64(lit);
194     } else if (likely(reg < 31)) {
195         return ctx->ir[reg];
196     } else {
197         return load_zero(ctx);
198     }
199 }
200 
201 static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
202 {
203     if (likely(reg < 31)) {
204         return ctx->ir[reg];
205     } else {
206         return dest_sink(ctx);
207     }
208 }
209 
210 static TCGv load_fpr(DisasContext *ctx, unsigned reg)
211 {
212     if (likely(reg < 31)) {
213         return cpu_fir[reg];
214     } else {
215         return load_zero(ctx);
216     }
217 }
218 
219 static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
220 {
221     if (likely(reg < 31)) {
222         return cpu_fir[reg];
223     } else {
224         return dest_sink(ctx);
225     }
226 }
227 
/* Byte offset within CPUAlphaState of the flag byte selected by SHIFT
   (a bit position within the 32-bit flags word), accounting for host
   endianness so a single-byte load/store hits the right lane.  */
static int get_flag_ofs(unsigned shift)
{
    int ofs = offsetof(CPUAlphaState, flags);
#ifdef HOST_WORDS_BIGENDIAN
    ofs += 3 - (shift / 8);
#else
    ofs += shift / 8;
#endif
    return ofs;
}
238 
/* Load (zero-extended) the flag byte selected by SHIFT into VAL.  */
static void ld_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_ld8u_i64(val, cpu_env, get_flag_ofs(shift));
}
243 
/* Store the low byte of VAL into the flag byte selected by SHIFT.  */
static void st_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_st8_i64(val, cpu_env, get_flag_ofs(shift));
}
248 
249 static void gen_excp_1(int exception, int error_code)
250 {
251     TCGv_i32 tmp1, tmp2;
252 
253     tmp1 = tcg_constant_i32(exception);
254     tmp2 = tcg_constant_i32(error_code);
255     gen_helper_excp(cpu_env, tmp1, tmp2);
256 }
257 
/* Raise EXCEPTION at the current insn: sync cpu_pc to pc_next first so
   the helper sees the correct PC.  Always ends the TB.  */
static DisasJumpType gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
    gen_excp_1(exception, error_code);
    return DISAS_NORETURN;
}
264 
/* Raise an illegal-opcode (OPCDEC) exception for the current insn.  */
static inline DisasJumpType gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
269 
/* LDF: load a 32-bit VAX F-float and expand it to register format.  */
static void gen_ldf(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, addr, ctx->mem_idx, MO_LEUL);
    gen_helper_memory_to_f(dest, tmp32);
    tcg_temp_free_i32(tmp32);
}
277 
/* LDG: load a 64-bit VAX G-float and convert it to register format.  */
static void gen_ldg(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, addr, ctx->mem_idx, MO_LEQ);
    gen_helper_memory_to_g(dest, tmp);
    tcg_temp_free(tmp);
}
285 
/* LDS: load a 32-bit IEEE single and expand it to register format.  */
static void gen_lds(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, addr, ctx->mem_idx, MO_LEUL);
    gen_helper_memory_to_s(dest, tmp32);
    tcg_temp_free_i32(tmp32);
}
293 
/* LDT: 64-bit IEEE double loads directly, no format conversion.  */
static void gen_ldt(DisasContext *ctx, TCGv dest, TCGv addr)
{
    tcg_gen_qemu_ld_i64(dest, addr, ctx->mem_idx, MO_LEQ);
}
298 
/* Common body for the fp load insns: compute rb + disp16 and hand the
   address to FUNC, which performs the format-specific load into $fRA.  */
static void gen_load_fp(DisasContext *ctx, int ra, int rb, int32_t disp16,
                        void (*func)(DisasContext *, TCGv, TCGv))
{
    /* Loads to $f31 are prefetches, which we can treat as nops. */
    if (likely(ra != 31)) {
        TCGv addr = tcg_temp_new();
        tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
        func(ctx, cpu_fir[ra], addr);
        tcg_temp_free(addr);
    }
}
310 
/* Common body for the integer load insns: load MemOp OP from rb + disp16
   into $RA.  CLEAR aligns the address to 8 bytes (LDx_U); LOCKED records
   the address/value pair for a following store-conditional (LDx_L).  */
static void gen_load_int(DisasContext *ctx, int ra, int rb, int32_t disp16,
                         MemOp op, bool clear, bool locked)
{
    TCGv addr, dest;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    if (clear) {
        tcg_gen_andi_i64(addr, addr, ~0x7);
    }

    dest = ctx->ir[ra];
    tcg_gen_qemu_ld_i64(dest, addr, ctx->mem_idx, op);

    if (locked) {
        /* Remember the reservation for gen_store_conditional.  */
        tcg_gen_mov_i64(cpu_lock_addr, addr);
        tcg_gen_mov_i64(cpu_lock_value, dest);
    }
    tcg_temp_free(addr);
}
338 
339 static void gen_stf(DisasContext *ctx, TCGv src, TCGv addr)
340 {
341     TCGv_i32 tmp32 = tcg_temp_new_i32();
342     gen_helper_f_to_memory(tmp32, addr);
343     tcg_gen_qemu_st_i32(tmp32, addr, ctx->mem_idx, MO_LEUL);
344     tcg_temp_free_i32(tmp32);
345 }
346 
/* STG: convert SRC to the 64-bit VAX G-float memory format and store.  */
static void gen_stg(DisasContext *ctx, TCGv src, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, src);
    tcg_gen_qemu_st_i64(tmp, addr, ctx->mem_idx, MO_LEQ);
    tcg_temp_free(tmp);
}
354 
/* STS: convert SRC to the 32-bit IEEE single memory format and store.  */
static void gen_sts(DisasContext *ctx, TCGv src, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, src);
    tcg_gen_qemu_st_i32(tmp32, addr, ctx->mem_idx, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
362 
/* STT: 64-bit IEEE double stores directly, no format conversion.  */
static void gen_stt(DisasContext *ctx, TCGv src, TCGv addr)
{
    tcg_gen_qemu_st_i64(src, addr, ctx->mem_idx, MO_LEQ);
}
367 
/* Common body for the fp store insns: compute rb + disp16 and hand the
   address and $fRA to FUNC, which performs the format-specific store.  */
static void gen_store_fp(DisasContext *ctx, int ra, int rb, int32_t disp16,
                         void (*func)(DisasContext *, TCGv, TCGv))
{
    TCGv addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    func(ctx, load_fpr(ctx, ra), addr);
    tcg_temp_free(addr);
}
376 
/* Common body for the integer store insns: store $RA (or zero for $31)
   with MemOp OP at rb + disp16.  CLEAR aligns the address to 8 bytes
   (STQ_U).  */
static void gen_store_int(DisasContext *ctx, int ra, int rb, int32_t disp16,
                          MemOp op, bool clear)
{
    TCGv addr, src;

    addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    if (clear) {
        tcg_gen_andi_i64(addr, addr, ~0x7);
    }

    src = load_gpr(ctx, ra);
    tcg_gen_qemu_st_i64(src, addr, ctx->mem_idx, op);

    tcg_temp_free(addr);
}
393 
/* STx_C: store-conditional.  Succeeds only if the computed address still
   matches cpu_lock_addr and memory still holds cpu_lock_value (checked
   atomically via cmpxchg).  $RA receives 1 on success, 0 on failure,
   and the reservation is always invalidated afterward.  */
static DisasJumpType gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                           int32_t disp16, int mem_idx,
                                           MemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    /* Temps must not live across the branch emitted below.  */
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    /* Atomically store $RA iff memory still equals the locked value;
       VAL receives the pre-existing memory contents either way.  */
    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    /* Any STx_C, pass or fail, clears the reservation.  */
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return DISAS_NEXT;
}
430 
/* True if a direct goto_tb chain to DEST is permitted from this TB.  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
435 
436 static DisasJumpType gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
437 {
438     uint64_t dest = ctx->base.pc_next + (disp << 2);
439 
440     if (ra != 31) {
441         tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
442     }
443 
444     /* Notice branch-to-next; used to initialize RA with the PC.  */
445     if (disp == 0) {
446         return 0;
447     } else if (use_goto_tb(ctx, dest)) {
448         tcg_gen_goto_tb(0);
449         tcg_gen_movi_i64(cpu_pc, dest);
450         tcg_gen_exit_tb(ctx->base.tb, 0);
451         return DISAS_NORETURN;
452     } else {
453         tcg_gen_movi_i64(cpu_pc, dest);
454         return DISAS_PC_UPDATED;
455     }
456 }
457 
/* Conditional branch on (CMP cond 0) to pc_next + (disp << 2).  Uses a
   two-sided goto_tb when chaining is allowed, otherwise selects the new
   PC with a movcond and leaves the TB via DISAS_PC_UPDATED.  */
static DisasJumpType gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                        TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        /* Fall-through: branch not taken.  */
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        tcg_gen_exit_tb(ctx->base.tb, 0);

        /* Branch taken.  */
        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 1);

        return DISAS_NORETURN;
    } else {
        TCGv_i64 z = load_zero(ctx);
        TCGv_i64 d = tcg_constant_i64(dest);
        TCGv_i64 p = tcg_constant_i64(ctx->base.pc_next);

        /* cpu_pc = (cmp cond 0) ? dest : pc_next.  */
        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);
        return DISAS_PC_UPDATED;
    }
}
486 
487 static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
488                                int32_t disp, int mask)
489 {
490     if (mask) {
491         TCGv tmp = tcg_temp_new();
492         DisasJumpType ret;
493 
494         tcg_gen_andi_i64(tmp, load_gpr(ctx, ra), 1);
495         ret = gen_bcond_internal(ctx, cond, tmp, disp);
496         tcg_temp_free(tmp);
497         return ret;
498     }
499     return gen_bcond_internal(ctx, cond, load_gpr(ctx, ra), disp);
500 }
501 
/* Fold -0.0 for comparison with COND.  */

/* Rewrite SRC into DEST so that an integer comparison of DEST against
   zero with COND gives the fp-correct answer when SRC is -0.0
   (bit pattern 1<<63), which must compare equal to +0.0.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
533 
/* Floating-point conditional branch: fold -0.0 out of $fRA, then branch
   on the resulting integer comparison against zero.  */
static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                                int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    DisasJumpType ret;

    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    ret = gen_bcond_internal(ctx, cond, cmp_tmp, disp);
    tcg_temp_free(cmp_tmp);
    return ret;
}
545 
/* FCMOVxx: $fRC = ($fRA cond 0.0) ? $fRB : $fRC, with -0.0 in $fRA
   folded so it compares equal to +0.0.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
559 
/* Qualifier bits extracted from the fp function field (fn11).  */
#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

/* QUAL_U and QUAL_V deliberately share a bit: the same encoding means
   underflow enable for fp results and overflow enable for int results.  */
#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */
570 
/* Emit code to set the softfloat rounding mode demanded by the /RM
   qualifier in FN11, skipping the store if the TB is already tracking
   that mode (ctx->tb_rm caches the last value emitted).  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic rounding: read the current mode out of the FPCR.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
610 
/* Emit code to set softfloat flush-to-zero per the /U qualifier in FN11,
   skipping the store if the TB already tracks that setting (ctx->tb_ftz
   caches the last value emitted).  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
640 
/* Return fp register REG as an IEEE input operand, emitting the input
   validation helper appropriate for the /S qualifier in FN11.  IS_CMP
   selects the comparison variant of the non-/S check.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
666 
/* Emit the post-operation fp exception check for destination RC, masking
   out the exception classes that the qualifiers in FN11 do not enable,
   and selecting the software-completion helper when /S is set.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_constant_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_constant_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }
}
698 
/* CVTLQ: unpack the longword stored in fp-register format (bits spread
   across <63:62,58:29>) back into a sign-extended 64-bit integer.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_shri_i64(tmp, vb, 29);
    tcg_gen_sari_i64(vc, vb, 32);
    tcg_gen_deposit_i64(vc, vc, tmp, 0, 30);

    tcg_temp_free(tmp);
}
711 
/* Common body for two-operand IEEE ops (sqrt, cvt): apply the rounding
   and flush-to-zero qualifiers, validate the input, call HELPER, and
   raise any enabled fp exceptions.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}
726 
/* Generate a gen_<name> wrapper around gen_ieee_arith2 for each
   two-operand IEEE instruction.  */
#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)
737 
/* CVTTQ: convert T-float to quadword integer, with a fast path for the
   common chopped-rounding form.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
756 
757 static void gen_ieee_intcvt(DisasContext *ctx,
758                             void (*helper)(TCGv, TCGv_ptr, TCGv),
759                             int rb, int rc, int fn11)
760 {
761     TCGv vb, vc;
762 
763     gen_qual_roundmode(ctx, fn11);
764     vb = load_fpr(ctx, rb);
765     vc = dest_fpr(ctx, rc);
766 
767     /* The only exception that can be raised by integer conversion
768        is inexact.  Thus we only need to worry about exceptions when
769        inexact handling is requested.  */
770     if (fn11 & QUAL_I) {
771         helper(vc, cpu_env, vb);
772         gen_fp_exc_raise(rc, fn11);
773     } else {
774         helper(vc, cpu_env, vb);
775     }
776 }
777 
/* Generate a gen_<name> wrapper around gen_ieee_intcvt for each
   integer-to-float conversion instruction.  */
#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)
786 
787 static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
788 {
789     TCGv vmask = tcg_constant_i64(mask);
790     TCGv tmp = tcg_temp_new_i64();
791 
792     if (inv_a) {
793         tcg_gen_andc_i64(tmp, vmask, va);
794     } else {
795         tcg_gen_and_i64(tmp, va, vmask);
796     }
797 
798     tcg_gen_andc_i64(vc, vb, vmask);
799     tcg_gen_or_i64(vc, vc, tmp);
800 
801     tcg_temp_free(tmp);
802 }
803 
/* Common body for three-operand IEEE ops (add/sub/mul/div): apply the
   rounding and flush-to-zero qualifiers, validate both inputs, call
   HELPER, and raise any enabled fp exceptions.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
820 
/* Generate a gen_<name> wrapper around gen_ieee_arith3 for each
   three-operand IEEE arithmetic instruction.  */
#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)
835 
/* Common body for IEEE comparisons: validate both inputs with the
   comparison variant of the input check, call HELPER, and raise any
   enabled fp exceptions.  No rounding/ftz setup: comparisons do not
   round.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
849 
/* Generate a gen_<name> wrapper around gen_ieee_compare for each IEEE
   comparison instruction.  */
#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)
860 
/* Expand the 8-bit byte-select literal LIT into a 64-bit mask in which
   each set bit of LIT becomes a full byte of ones.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;

    /* Walk the bits from high to low, shifting each selected byte into
       place; bit i ends up controlling byte i of the result.  */
    for (int i = 7; i >= 0; --i) {
        mask <<= 8;
        if (lit & (1u << i)) {
            mask |= 0xff;
        }
    }
    return mask;
}
873 
874 /* Implement zapnot with an immediate operand, which expands to some
875    form of immediate AND.  This is a basic building block in the
876    definition of many of the other byte manipulation instructions.  */
877 static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
878 {
879     switch (lit) {
880     case 0x00:
881         tcg_gen_movi_i64(dest, 0);
882         break;
883     case 0x01:
884         tcg_gen_ext8u_i64(dest, src);
885         break;
886     case 0x03:
887         tcg_gen_ext16u_i64(dest, src);
888         break;
889     case 0x0f:
890         tcg_gen_ext32u_i64(dest, src);
891         break;
892     case 0xff:
893         tcg_gen_mov_i64(dest, src);
894         break;
895     default:
896         tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
897         break;
898     }
899 }
900 
/* EXTWH, EXTLH, EXTQH */
/* Extract-high: left-shift VA by (64 - 8*shift) bits, then zap to the
   insn's BYTE_MASK.  With a literal shift this folds into a single
   deposit; otherwise the shift count is computed from register RB.  */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift count is (64 - 8*(rb & 7)) mod 64, computed as the
           negation of rb*8 masked to 6 bits.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
923 
/* EXTBL, EXTWL, EXTLL, EXTQL */
/* Extract-low: right-shift VA by 8*(shift & 7) bits, then zap to the
   insn's BYTE_MASK.  With a literal shift this folds into a single
   extract.  */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            /* Clamp so the extract stays within the 64-bit operand.  */
            len = 64 - pos;
        }
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}
944 
/* INSWH, INSLH, INSQH */
/* Insert-high: the high half of the byte-inserted value, i.e. the zapped
   VA right-shifted by (64 - 8*shift) bits.  */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            /* Shift of 0 bytes: nothing spills into the high half.  */
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
982 
/* INSBL, INSWL, INSLL, INSQL */
/* Insert-low: the low half of the byte-inserted value, i.e. the zapped
   VA left-shifted by 8*(shift & 7) bits.  */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len > 64) {
            /* Clamp so the deposit stays within the 64-bit operand.  */
            len = 64 - pos;
        }
        tcg_gen_deposit_z_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1011 
/* MSKWH, MSKLH, MSKQH */
/* Emit code for the mask-high byte-manipulation instructions: clear from
   VA the bytes that the corresponding INSxH would have written, i.e. the
   part of BYTE_MASK that spills into the high quadword after shifting by
   the byte offset in RB (or the literal LIT); result in VC.  */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        /* Constant offset: shift the byte mask left and keep bits <15:8>,
           the part that lands in the high quadword, then zap those bytes.  */
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1044 
1045 /* MSKBL, MSKWL, MSKLL, MSKQL */
1046 static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
1047                       uint8_t lit, uint8_t byte_mask)
1048 {
1049     if (islit) {
1050         gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
1051     } else {
1052         TCGv shift = tcg_temp_new();
1053         TCGv mask = tcg_temp_new();
1054 
1055         tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
1056         tcg_gen_shli_i64(shift, shift, 3);
1057         tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
1058         tcg_gen_shl_i64(mask, mask, shift);
1059 
1060         tcg_gen_andc_i64(vc, va, mask);
1061 
1062         tcg_temp_free(mask);
1063         tcg_temp_free(shift);
1064     }
1065 }
1066 
/* Read the current RX flag byte into Ra (skipped when Ra is the zero
   register R31), then unconditionally store SET as the new RX flag
   value.  Used for the RC/RS-style opcodes.  */
static void gen_rx(DisasContext *ctx, int ra, int set)
{
    if (ra != 31) {
        ld_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
    }

    st_flag_byte(tcg_constant_i64(set), ENV_FLAG_RX_SHIFT);
}
1075 
/*
 * Emit code for CALL_PAL with function code PALCODE.  Trivial OSF/1
 * PALcode entry points are emulated inline; anything else branches to
 * do_call_pal, which transfers control to the PALcode image at palbr
 * (system emulation) or raises EXCP_CALL_PAL (user emulation).
 * Returns the DisasJumpType describing how the TB ends.
 */
static DisasJumpType gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            /* Keep bit 7 (the unprivileged marker) plus the 6 bits
               that index the PALcode entry point below.  */
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tbflags & ENV_FLAG_PS_USER) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL */
            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);

            /* But make sure and store only the 3 IPL bits from the user.  */
            {
                TCGv tmp = tcg_temp_new();
                tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
                st_flag_byte(tmp, ENV_FLAG_PS_SHIFT);
                tcg_temp_free(tmp);
            }

            /* Allow interrupts to be recognized right away.  */
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            return DISAS_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);
            break;

        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            /* cpu_env points at the CPUAlphaState embedded in AlphaCPU;
               step back to the containing AlphaCPU and read cpu_index
               out of its CPUState.  */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT */
            /* Mark the cpu halted, return 0 in v0, and leave the TB
               via EXCP_HALTED.  */
            tcg_gen_st_i32(tcg_constant_i32(1), cpu_env,
                           -offsetof(AlphaCPU, env) +
                           offsetof(CPUState, halted));
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            /* Only the low 6 bits index a privileged entry point.  */
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->base.pc_next;
        uint64_t entry = ctx->palbr;

        if (ctx->tbflags & ENV_FLAG_PAL_MODE) {
            /* Already in PAL mode: record that in bit 0 of the saved
               exception address.  */
            exc_addr |= 1;
        } else {
            /* Entering PAL mode: set the PAL flag byte.  */
            tcg_gen_movi_i64(tmp, 1);
            st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Unprivileged entry points live at palbr + 0x2000, privileged
           ones at palbr + 0x1000; entries are 64 bytes apart.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        tcg_gen_movi_i64(cpu_pc, entry);
        return DISAS_PC_UPDATED;
    }
#endif
}
1218 
1219 #ifndef CONFIG_USER_ONLY
1220 
1221 #define PR_LONG         0x200000
1222 
1223 static int cpu_pr_data(int pr)
1224 {
1225     switch (pr) {
1226     case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
1227     case  3: return offsetof(CPUAlphaState, trap_arg0);
1228     case  4: return offsetof(CPUAlphaState, trap_arg1);
1229     case  5: return offsetof(CPUAlphaState, trap_arg2);
1230     case  6: return offsetof(CPUAlphaState, exc_addr);
1231     case  7: return offsetof(CPUAlphaState, palbr);
1232     case  8: return offsetof(CPUAlphaState, ptbr);
1233     case  9: return offsetof(CPUAlphaState, vptptr);
1234     case 10: return offsetof(CPUAlphaState, unique);
1235     case 11: return offsetof(CPUAlphaState, sysval);
1236     case 12: return offsetof(CPUAlphaState, usp);
1237 
1238     case 40 ... 63:
1239         return offsetof(CPUAlphaState, scratch[pr - 40]);
1240 
1241     case 251:
1242         return offsetof(CPUAlphaState, alarm_expire);
1243     }
1244     return 0;
1245 }
1246 
/*
 * Emit code for MFPR: read processor register REGNO into VA.
 * Returns DISAS_NEXT, or DISAS_PC_STALE when an icount-mode timer
 * read forces the TB to end after this insn.
 */
static DisasJumpType gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Timer reads count as I/O under icount: bracket the
               access with gen_io_start and end the TB.  */
            gen_io_start();
            helper(va);
            return DISAS_PC_STALE;
        } else {
            helper(va);
        }
        break;

    case 0: /* PS */
        ld_flag_byte(va, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        ld_flag_byte(va, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return DISAS_NEXT;
}
1297 
/*
 * Emit code for MTPR: write VB to processor register REGNO.
 * Returns DISAS_NEXT unless the write has side effects that require
 * ending the TB (halt, TB flush, icount-mode timer access).
 */
static DisasJumpType gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    int data;
    DisasJumpType ret = DISAS_NEXT;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        /* Mark the cpu halted and leave the TB via EXCP_HALTED.  */
        tcg_gen_st_i32(tcg_constant_i32(1), cpu_env,
                       -offsetof(AlphaCPU, env) + offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return DISAS_PC_STALE;

    case 251:
        /* ALARM */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Timer access counts as I/O under icount; end the TB.  */
            gen_io_start();
            ret = DISAS_PC_STALE;
        }
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return DISAS_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    case 0: /* PS */
        st_flag_byte(vb, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        st_flag_byte(vb, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return ret;
}
1372 #endif /* !USER_ONLY*/
1373 
/* Decode guards used by translate_one: each aborts decode by jumping
   to a label in translate_one when its condition fails.  */

/* Abort decode if the insn encoded a literal operand.  */
#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Abort decode unless this cpu's amask advertises feature FLAG.  */
#define REQUIRE_AMASK(FLAG)                     \
    do {                                        \
        if ((ctx->amask & AMASK_##FLAG) == 0) { \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Abort decode unless tb flag FLAG is set for this TB.  */
#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tbflags & (FLAG)) == 0) {     \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Abort decode unless register field WHICH encodes register 31.  */
#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Jump to raise_fen unless the FEN flag is set for this TB.  */
#define REQUIRE_FEN                             \
    do {                                        \
        if (!(ctx->tbflags & ENV_FLAG_FEN)) {   \
            goto raise_fen;                     \
        }                                       \
    } while (0)
1408 
1409 static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
1410 {
1411     int32_t disp21, disp16, disp12 __attribute__((unused));
1412     uint16_t fn11;
1413     uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
1414     bool islit, real_islit;
1415     TCGv va, vb, vc, tmp, tmp2;
1416     TCGv_i32 t32;
1417     DisasJumpType ret;
1418 
1419     /* Decode all instruction fields */
1420     opc = extract32(insn, 26, 6);
1421     ra = extract32(insn, 21, 5);
1422     rb = extract32(insn, 16, 5);
1423     rc = extract32(insn, 0, 5);
1424     real_islit = islit = extract32(insn, 12, 1);
1425     lit = extract32(insn, 13, 8);
1426 
1427     disp21 = sextract32(insn, 0, 21);
1428     disp16 = sextract32(insn, 0, 16);
1429     disp12 = sextract32(insn, 0, 12);
1430 
1431     fn11 = extract32(insn, 5, 11);
1432     fpfn = extract32(insn, 5, 6);
1433     fn7 = extract32(insn, 5, 7);
1434 
1435     if (rb == 31 && !islit) {
1436         islit = true;
1437         lit = 0;
1438     }
1439 
1440     ret = DISAS_NEXT;
1441     switch (opc) {
1442     case 0x00:
1443         /* CALL_PAL */
1444         ret = gen_call_pal(ctx, insn & 0x03ffffff);
1445         break;
1446     case 0x01:
1447         /* OPC01 */
1448         goto invalid_opc;
1449     case 0x02:
1450         /* OPC02 */
1451         goto invalid_opc;
1452     case 0x03:
1453         /* OPC03 */
1454         goto invalid_opc;
1455     case 0x04:
1456         /* OPC04 */
1457         goto invalid_opc;
1458     case 0x05:
1459         /* OPC05 */
1460         goto invalid_opc;
1461     case 0x06:
1462         /* OPC06 */
1463         goto invalid_opc;
1464     case 0x07:
1465         /* OPC07 */
1466         goto invalid_opc;
1467 
1468     case 0x09:
1469         /* LDAH */
1470         disp16 = (uint32_t)disp16 << 16;
1471         /* fall through */
1472     case 0x08:
1473         /* LDA */
1474         va = dest_gpr(ctx, ra);
1475         /* It's worth special-casing immediate loads.  */
1476         if (rb == 31) {
1477             tcg_gen_movi_i64(va, disp16);
1478         } else {
1479             tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
1480         }
1481         break;
1482 
1483     case 0x0A:
1484         /* LDBU */
1485         REQUIRE_AMASK(BWX);
1486         gen_load_int(ctx, ra, rb, disp16, MO_UB, 0, 0);
1487         break;
1488     case 0x0B:
1489         /* LDQ_U */
1490         gen_load_int(ctx, ra, rb, disp16, MO_LEQ, 1, 0);
1491         break;
1492     case 0x0C:
1493         /* LDWU */
1494         REQUIRE_AMASK(BWX);
1495         gen_load_int(ctx, ra, rb, disp16, MO_LEUW, 0, 0);
1496         break;
1497     case 0x0D:
1498         /* STW */
1499         REQUIRE_AMASK(BWX);
1500         gen_store_int(ctx, ra, rb, disp16, MO_LEUW, 0);
1501         break;
1502     case 0x0E:
1503         /* STB */
1504         REQUIRE_AMASK(BWX);
1505         gen_store_int(ctx, ra, rb, disp16, MO_UB, 0);
1506         break;
1507     case 0x0F:
1508         /* STQ_U */
1509         gen_store_int(ctx, ra, rb, disp16, MO_LEQ, 1);
1510         break;
1511 
1512     case 0x10:
1513         vc = dest_gpr(ctx, rc);
1514         vb = load_gpr_lit(ctx, rb, lit, islit);
1515 
1516         if (ra == 31) {
1517             if (fn7 == 0x00) {
1518                 /* Special case ADDL as SEXTL.  */
1519                 tcg_gen_ext32s_i64(vc, vb);
1520                 break;
1521             }
1522             if (fn7 == 0x29) {
1523                 /* Special case SUBQ as NEGQ.  */
1524                 tcg_gen_neg_i64(vc, vb);
1525                 break;
1526             }
1527         }
1528 
1529         va = load_gpr(ctx, ra);
1530         switch (fn7) {
1531         case 0x00:
1532             /* ADDL */
1533             tcg_gen_add_i64(vc, va, vb);
1534             tcg_gen_ext32s_i64(vc, vc);
1535             break;
1536         case 0x02:
1537             /* S4ADDL */
1538             tmp = tcg_temp_new();
1539             tcg_gen_shli_i64(tmp, va, 2);
1540             tcg_gen_add_i64(tmp, tmp, vb);
1541             tcg_gen_ext32s_i64(vc, tmp);
1542             tcg_temp_free(tmp);
1543             break;
1544         case 0x09:
1545             /* SUBL */
1546             tcg_gen_sub_i64(vc, va, vb);
1547             tcg_gen_ext32s_i64(vc, vc);
1548             break;
1549         case 0x0B:
1550             /* S4SUBL */
1551             tmp = tcg_temp_new();
1552             tcg_gen_shli_i64(tmp, va, 2);
1553             tcg_gen_sub_i64(tmp, tmp, vb);
1554             tcg_gen_ext32s_i64(vc, tmp);
1555             tcg_temp_free(tmp);
1556             break;
1557         case 0x0F:
1558             /* CMPBGE */
1559             if (ra == 31) {
1560                 /* Special case 0 >= X as X == 0.  */
1561                 gen_helper_cmpbe0(vc, vb);
1562             } else {
1563                 gen_helper_cmpbge(vc, va, vb);
1564             }
1565             break;
1566         case 0x12:
1567             /* S8ADDL */
1568             tmp = tcg_temp_new();
1569             tcg_gen_shli_i64(tmp, va, 3);
1570             tcg_gen_add_i64(tmp, tmp, vb);
1571             tcg_gen_ext32s_i64(vc, tmp);
1572             tcg_temp_free(tmp);
1573             break;
1574         case 0x1B:
1575             /* S8SUBL */
1576             tmp = tcg_temp_new();
1577             tcg_gen_shli_i64(tmp, va, 3);
1578             tcg_gen_sub_i64(tmp, tmp, vb);
1579             tcg_gen_ext32s_i64(vc, tmp);
1580             tcg_temp_free(tmp);
1581             break;
1582         case 0x1D:
1583             /* CMPULT */
1584             tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
1585             break;
1586         case 0x20:
1587             /* ADDQ */
1588             tcg_gen_add_i64(vc, va, vb);
1589             break;
1590         case 0x22:
1591             /* S4ADDQ */
1592             tmp = tcg_temp_new();
1593             tcg_gen_shli_i64(tmp, va, 2);
1594             tcg_gen_add_i64(vc, tmp, vb);
1595             tcg_temp_free(tmp);
1596             break;
1597         case 0x29:
1598             /* SUBQ */
1599             tcg_gen_sub_i64(vc, va, vb);
1600             break;
1601         case 0x2B:
1602             /* S4SUBQ */
1603             tmp = tcg_temp_new();
1604             tcg_gen_shli_i64(tmp, va, 2);
1605             tcg_gen_sub_i64(vc, tmp, vb);
1606             tcg_temp_free(tmp);
1607             break;
1608         case 0x2D:
1609             /* CMPEQ */
1610             tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
1611             break;
1612         case 0x32:
1613             /* S8ADDQ */
1614             tmp = tcg_temp_new();
1615             tcg_gen_shli_i64(tmp, va, 3);
1616             tcg_gen_add_i64(vc, tmp, vb);
1617             tcg_temp_free(tmp);
1618             break;
1619         case 0x3B:
1620             /* S8SUBQ */
1621             tmp = tcg_temp_new();
1622             tcg_gen_shli_i64(tmp, va, 3);
1623             tcg_gen_sub_i64(vc, tmp, vb);
1624             tcg_temp_free(tmp);
1625             break;
1626         case 0x3D:
1627             /* CMPULE */
1628             tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
1629             break;
1630         case 0x40:
1631             /* ADDL/V */
1632             tmp = tcg_temp_new();
1633             tcg_gen_ext32s_i64(tmp, va);
1634             tcg_gen_ext32s_i64(vc, vb);
1635             tcg_gen_add_i64(tmp, tmp, vc);
1636             tcg_gen_ext32s_i64(vc, tmp);
1637             gen_helper_check_overflow(cpu_env, vc, tmp);
1638             tcg_temp_free(tmp);
1639             break;
1640         case 0x49:
1641             /* SUBL/V */
1642             tmp = tcg_temp_new();
1643             tcg_gen_ext32s_i64(tmp, va);
1644             tcg_gen_ext32s_i64(vc, vb);
1645             tcg_gen_sub_i64(tmp, tmp, vc);
1646             tcg_gen_ext32s_i64(vc, tmp);
1647             gen_helper_check_overflow(cpu_env, vc, tmp);
1648             tcg_temp_free(tmp);
1649             break;
1650         case 0x4D:
1651             /* CMPLT */
1652             tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
1653             break;
1654         case 0x60:
1655             /* ADDQ/V */
1656             tmp = tcg_temp_new();
1657             tmp2 = tcg_temp_new();
1658             tcg_gen_eqv_i64(tmp, va, vb);
1659             tcg_gen_mov_i64(tmp2, va);
1660             tcg_gen_add_i64(vc, va, vb);
1661             tcg_gen_xor_i64(tmp2, tmp2, vc);
1662             tcg_gen_and_i64(tmp, tmp, tmp2);
1663             tcg_gen_shri_i64(tmp, tmp, 63);
1664             tcg_gen_movi_i64(tmp2, 0);
1665             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1666             tcg_temp_free(tmp);
1667             tcg_temp_free(tmp2);
1668             break;
1669         case 0x69:
1670             /* SUBQ/V */
1671             tmp = tcg_temp_new();
1672             tmp2 = tcg_temp_new();
1673             tcg_gen_xor_i64(tmp, va, vb);
1674             tcg_gen_mov_i64(tmp2, va);
1675             tcg_gen_sub_i64(vc, va, vb);
1676             tcg_gen_xor_i64(tmp2, tmp2, vc);
1677             tcg_gen_and_i64(tmp, tmp, tmp2);
1678             tcg_gen_shri_i64(tmp, tmp, 63);
1679             tcg_gen_movi_i64(tmp2, 0);
1680             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1681             tcg_temp_free(tmp);
1682             tcg_temp_free(tmp2);
1683             break;
1684         case 0x6D:
1685             /* CMPLE */
1686             tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
1687             break;
1688         default:
1689             goto invalid_opc;
1690         }
1691         break;
1692 
1693     case 0x11:
1694         if (fn7 == 0x20) {
1695             if (rc == 31) {
1696                 /* Special case BIS as NOP.  */
1697                 break;
1698             }
1699             if (ra == 31) {
1700                 /* Special case BIS as MOV.  */
1701                 vc = dest_gpr(ctx, rc);
1702                 if (islit) {
1703                     tcg_gen_movi_i64(vc, lit);
1704                 } else {
1705                     tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
1706                 }
1707                 break;
1708             }
1709         }
1710 
1711         vc = dest_gpr(ctx, rc);
1712         vb = load_gpr_lit(ctx, rb, lit, islit);
1713 
1714         if (fn7 == 0x28 && ra == 31) {
1715             /* Special case ORNOT as NOT.  */
1716             tcg_gen_not_i64(vc, vb);
1717             break;
1718         }
1719 
1720         va = load_gpr(ctx, ra);
1721         switch (fn7) {
1722         case 0x00:
1723             /* AND */
1724             tcg_gen_and_i64(vc, va, vb);
1725             break;
1726         case 0x08:
1727             /* BIC */
1728             tcg_gen_andc_i64(vc, va, vb);
1729             break;
1730         case 0x14:
1731             /* CMOVLBS */
1732             tmp = tcg_temp_new();
1733             tcg_gen_andi_i64(tmp, va, 1);
1734             tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
1735                                 vb, load_gpr(ctx, rc));
1736             tcg_temp_free(tmp);
1737             break;
1738         case 0x16:
1739             /* CMOVLBC */
1740             tmp = tcg_temp_new();
1741             tcg_gen_andi_i64(tmp, va, 1);
1742             tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
1743                                 vb, load_gpr(ctx, rc));
1744             tcg_temp_free(tmp);
1745             break;
1746         case 0x20:
1747             /* BIS */
1748             tcg_gen_or_i64(vc, va, vb);
1749             break;
1750         case 0x24:
1751             /* CMOVEQ */
1752             tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
1753                                 vb, load_gpr(ctx, rc));
1754             break;
1755         case 0x26:
1756             /* CMOVNE */
1757             tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
1758                                 vb, load_gpr(ctx, rc));
1759             break;
1760         case 0x28:
1761             /* ORNOT */
1762             tcg_gen_orc_i64(vc, va, vb);
1763             break;
1764         case 0x40:
1765             /* XOR */
1766             tcg_gen_xor_i64(vc, va, vb);
1767             break;
1768         case 0x44:
1769             /* CMOVLT */
1770             tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
1771                                 vb, load_gpr(ctx, rc));
1772             break;
1773         case 0x46:
1774             /* CMOVGE */
1775             tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
1776                                 vb, load_gpr(ctx, rc));
1777             break;
1778         case 0x48:
1779             /* EQV */
1780             tcg_gen_eqv_i64(vc, va, vb);
1781             break;
1782         case 0x61:
1783             /* AMASK */
1784             REQUIRE_REG_31(ra);
1785             tcg_gen_andi_i64(vc, vb, ~ctx->amask);
1786             break;
1787         case 0x64:
1788             /* CMOVLE */
1789             tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
1790                                 vb, load_gpr(ctx, rc));
1791             break;
1792         case 0x66:
1793             /* CMOVGT */
1794             tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
1795                                 vb, load_gpr(ctx, rc));
1796             break;
1797         case 0x6C:
1798             /* IMPLVER */
1799             REQUIRE_REG_31(ra);
1800             tcg_gen_movi_i64(vc, ctx->implver);
1801             break;
1802         default:
1803             goto invalid_opc;
1804         }
1805         break;
1806 
1807     case 0x12:
1808         vc = dest_gpr(ctx, rc);
1809         va = load_gpr(ctx, ra);
1810         switch (fn7) {
1811         case 0x02:
1812             /* MSKBL */
1813             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
1814             break;
1815         case 0x06:
1816             /* EXTBL */
1817             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
1818             break;
1819         case 0x0B:
1820             /* INSBL */
1821             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
1822             break;
1823         case 0x12:
1824             /* MSKWL */
1825             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
1826             break;
1827         case 0x16:
1828             /* EXTWL */
1829             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
1830             break;
1831         case 0x1B:
1832             /* INSWL */
1833             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
1834             break;
1835         case 0x22:
1836             /* MSKLL */
1837             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
1838             break;
1839         case 0x26:
1840             /* EXTLL */
1841             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
1842             break;
1843         case 0x2B:
1844             /* INSLL */
1845             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
1846             break;
1847         case 0x30:
1848             /* ZAP */
1849             if (islit) {
1850                 gen_zapnoti(vc, va, ~lit);
1851             } else {
1852                 gen_helper_zap(vc, va, load_gpr(ctx, rb));
1853             }
1854             break;
1855         case 0x31:
1856             /* ZAPNOT */
1857             if (islit) {
1858                 gen_zapnoti(vc, va, lit);
1859             } else {
1860                 gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
1861             }
1862             break;
1863         case 0x32:
1864             /* MSKQL */
1865             gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
1866             break;
1867         case 0x34:
1868             /* SRL */
1869             if (islit) {
1870                 tcg_gen_shri_i64(vc, va, lit & 0x3f);
1871             } else {
1872                 tmp = tcg_temp_new();
1873                 vb = load_gpr(ctx, rb);
1874                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1875                 tcg_gen_shr_i64(vc, va, tmp);
1876                 tcg_temp_free(tmp);
1877             }
1878             break;
1879         case 0x36:
1880             /* EXTQL */
1881             gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
1882             break;
1883         case 0x39:
1884             /* SLL */
1885             if (islit) {
1886                 tcg_gen_shli_i64(vc, va, lit & 0x3f);
1887             } else {
1888                 tmp = tcg_temp_new();
1889                 vb = load_gpr(ctx, rb);
1890                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1891                 tcg_gen_shl_i64(vc, va, tmp);
1892                 tcg_temp_free(tmp);
1893             }
1894             break;
1895         case 0x3B:
1896             /* INSQL */
1897             gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
1898             break;
1899         case 0x3C:
1900             /* SRA */
1901             if (islit) {
1902                 tcg_gen_sari_i64(vc, va, lit & 0x3f);
1903             } else {
1904                 tmp = tcg_temp_new();
1905                 vb = load_gpr(ctx, rb);
1906                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1907                 tcg_gen_sar_i64(vc, va, tmp);
1908                 tcg_temp_free(tmp);
1909             }
1910             break;
1911         case 0x52:
1912             /* MSKWH */
1913             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
1914             break;
1915         case 0x57:
1916             /* INSWH */
1917             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
1918             break;
1919         case 0x5A:
1920             /* EXTWH */
1921             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
1922             break;
1923         case 0x62:
1924             /* MSKLH */
1925             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
1926             break;
1927         case 0x67:
1928             /* INSLH */
1929             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
1930             break;
1931         case 0x6A:
1932             /* EXTLH */
1933             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
1934             break;
1935         case 0x72:
1936             /* MSKQH */
1937             gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
1938             break;
1939         case 0x77:
1940             /* INSQH */
1941             gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
1942             break;
1943         case 0x7A:
1944             /* EXTQH */
1945             gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
1946             break;
1947         default:
1948             goto invalid_opc;
1949         }
1950         break;
1951 
1952     case 0x13:
1953         vc = dest_gpr(ctx, rc);
1954         vb = load_gpr_lit(ctx, rb, lit, islit);
1955         va = load_gpr(ctx, ra);
1956         switch (fn7) {
1957         case 0x00:
1958             /* MULL */
1959             tcg_gen_mul_i64(vc, va, vb);
1960             tcg_gen_ext32s_i64(vc, vc);
1961             break;
1962         case 0x20:
1963             /* MULQ */
1964             tcg_gen_mul_i64(vc, va, vb);
1965             break;
1966         case 0x30:
1967             /* UMULH */
1968             tmp = tcg_temp_new();
1969             tcg_gen_mulu2_i64(tmp, vc, va, vb);
1970             tcg_temp_free(tmp);
1971             break;
1972         case 0x40:
1973             /* MULL/V */
1974             tmp = tcg_temp_new();
1975             tcg_gen_ext32s_i64(tmp, va);
1976             tcg_gen_ext32s_i64(vc, vb);
1977             tcg_gen_mul_i64(tmp, tmp, vc);
1978             tcg_gen_ext32s_i64(vc, tmp);
1979             gen_helper_check_overflow(cpu_env, vc, tmp);
1980             tcg_temp_free(tmp);
1981             break;
1982         case 0x60:
1983             /* MULQ/V */
1984             tmp = tcg_temp_new();
1985             tmp2 = tcg_temp_new();
1986             tcg_gen_muls2_i64(vc, tmp, va, vb);
1987             tcg_gen_sari_i64(tmp2, vc, 63);
1988             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1989             tcg_temp_free(tmp);
1990             tcg_temp_free(tmp2);
1991             break;
1992         default:
1993             goto invalid_opc;
1994         }
1995         break;
1996 
1997     case 0x14:
1998         REQUIRE_AMASK(FIX);
1999         vc = dest_fpr(ctx, rc);
2000         switch (fpfn) { /* fn11 & 0x3F */
2001         case 0x04:
2002             /* ITOFS */
2003             REQUIRE_REG_31(rb);
2004             REQUIRE_FEN;
2005             t32 = tcg_temp_new_i32();
2006             va = load_gpr(ctx, ra);
2007             tcg_gen_extrl_i64_i32(t32, va);
2008             gen_helper_memory_to_s(vc, t32);
2009             tcg_temp_free_i32(t32);
2010             break;
2011         case 0x0A:
2012             /* SQRTF */
2013             REQUIRE_REG_31(ra);
2014             REQUIRE_FEN;
2015             vb = load_fpr(ctx, rb);
2016             gen_helper_sqrtf(vc, cpu_env, vb);
2017             break;
2018         case 0x0B:
2019             /* SQRTS */
2020             REQUIRE_REG_31(ra);
2021             REQUIRE_FEN;
2022             gen_sqrts(ctx, rb, rc, fn11);
2023             break;
2024         case 0x14:
2025             /* ITOFF */
2026             REQUIRE_REG_31(rb);
2027             REQUIRE_FEN;
2028             t32 = tcg_temp_new_i32();
2029             va = load_gpr(ctx, ra);
2030             tcg_gen_extrl_i64_i32(t32, va);
2031             gen_helper_memory_to_f(vc, t32);
2032             tcg_temp_free_i32(t32);
2033             break;
2034         case 0x24:
2035             /* ITOFT */
2036             REQUIRE_REG_31(rb);
2037             REQUIRE_FEN;
2038             va = load_gpr(ctx, ra);
2039             tcg_gen_mov_i64(vc, va);
2040             break;
2041         case 0x2A:
2042             /* SQRTG */
2043             REQUIRE_REG_31(ra);
2044             REQUIRE_FEN;
2045             vb = load_fpr(ctx, rb);
2046             gen_helper_sqrtg(vc, cpu_env, vb);
2047             break;
2048         case 0x02B:
2049             /* SQRTT */
2050             REQUIRE_REG_31(ra);
2051             REQUIRE_FEN;
2052             gen_sqrtt(ctx, rb, rc, fn11);
2053             break;
2054         default:
2055             goto invalid_opc;
2056         }
2057         break;
2058 
2059     case 0x15:
2060         /* VAX floating point */
2061         /* XXX: rounding mode and trap are ignored (!) */
2062         vc = dest_fpr(ctx, rc);
2063         vb = load_fpr(ctx, rb);
2064         va = load_fpr(ctx, ra);
2065         switch (fpfn) { /* fn11 & 0x3F */
2066         case 0x00:
2067             /* ADDF */
2068             REQUIRE_FEN;
2069             gen_helper_addf(vc, cpu_env, va, vb);
2070             break;
2071         case 0x01:
2072             /* SUBF */
2073             REQUIRE_FEN;
2074             gen_helper_subf(vc, cpu_env, va, vb);
2075             break;
2076         case 0x02:
2077             /* MULF */
2078             REQUIRE_FEN;
2079             gen_helper_mulf(vc, cpu_env, va, vb);
2080             break;
2081         case 0x03:
2082             /* DIVF */
2083             REQUIRE_FEN;
2084             gen_helper_divf(vc, cpu_env, va, vb);
2085             break;
2086         case 0x1E:
2087             /* CVTDG -- TODO */
2088             REQUIRE_REG_31(ra);
2089             goto invalid_opc;
2090         case 0x20:
2091             /* ADDG */
2092             REQUIRE_FEN;
2093             gen_helper_addg(vc, cpu_env, va, vb);
2094             break;
2095         case 0x21:
2096             /* SUBG */
2097             REQUIRE_FEN;
2098             gen_helper_subg(vc, cpu_env, va, vb);
2099             break;
2100         case 0x22:
2101             /* MULG */
2102             REQUIRE_FEN;
2103             gen_helper_mulg(vc, cpu_env, va, vb);
2104             break;
2105         case 0x23:
2106             /* DIVG */
2107             REQUIRE_FEN;
2108             gen_helper_divg(vc, cpu_env, va, vb);
2109             break;
2110         case 0x25:
2111             /* CMPGEQ */
2112             REQUIRE_FEN;
2113             gen_helper_cmpgeq(vc, cpu_env, va, vb);
2114             break;
2115         case 0x26:
2116             /* CMPGLT */
2117             REQUIRE_FEN;
2118             gen_helper_cmpglt(vc, cpu_env, va, vb);
2119             break;
2120         case 0x27:
2121             /* CMPGLE */
2122             REQUIRE_FEN;
2123             gen_helper_cmpgle(vc, cpu_env, va, vb);
2124             break;
2125         case 0x2C:
2126             /* CVTGF */
2127             REQUIRE_REG_31(ra);
2128             REQUIRE_FEN;
2129             gen_helper_cvtgf(vc, cpu_env, vb);
2130             break;
2131         case 0x2D:
2132             /* CVTGD -- TODO */
2133             REQUIRE_REG_31(ra);
2134             goto invalid_opc;
2135         case 0x2F:
2136             /* CVTGQ */
2137             REQUIRE_REG_31(ra);
2138             REQUIRE_FEN;
2139             gen_helper_cvtgq(vc, cpu_env, vb);
2140             break;
2141         case 0x3C:
2142             /* CVTQF */
2143             REQUIRE_REG_31(ra);
2144             REQUIRE_FEN;
2145             gen_helper_cvtqf(vc, cpu_env, vb);
2146             break;
2147         case 0x3E:
2148             /* CVTQG */
2149             REQUIRE_REG_31(ra);
2150             REQUIRE_FEN;
2151             gen_helper_cvtqg(vc, cpu_env, vb);
2152             break;
2153         default:
2154             goto invalid_opc;
2155         }
2156         break;
2157 
2158     case 0x16:
2159         /* IEEE floating-point */
2160         switch (fpfn) { /* fn11 & 0x3F */
2161         case 0x00:
2162             /* ADDS */
2163             REQUIRE_FEN;
2164             gen_adds(ctx, ra, rb, rc, fn11);
2165             break;
2166         case 0x01:
2167             /* SUBS */
2168             REQUIRE_FEN;
2169             gen_subs(ctx, ra, rb, rc, fn11);
2170             break;
2171         case 0x02:
2172             /* MULS */
2173             REQUIRE_FEN;
2174             gen_muls(ctx, ra, rb, rc, fn11);
2175             break;
2176         case 0x03:
2177             /* DIVS */
2178             REQUIRE_FEN;
2179             gen_divs(ctx, ra, rb, rc, fn11);
2180             break;
2181         case 0x20:
2182             /* ADDT */
2183             REQUIRE_FEN;
2184             gen_addt(ctx, ra, rb, rc, fn11);
2185             break;
2186         case 0x21:
2187             /* SUBT */
2188             REQUIRE_FEN;
2189             gen_subt(ctx, ra, rb, rc, fn11);
2190             break;
2191         case 0x22:
2192             /* MULT */
2193             REQUIRE_FEN;
2194             gen_mult(ctx, ra, rb, rc, fn11);
2195             break;
2196         case 0x23:
2197             /* DIVT */
2198             REQUIRE_FEN;
2199             gen_divt(ctx, ra, rb, rc, fn11);
2200             break;
2201         case 0x24:
2202             /* CMPTUN */
2203             REQUIRE_FEN;
2204             gen_cmptun(ctx, ra, rb, rc, fn11);
2205             break;
2206         case 0x25:
2207             /* CMPTEQ */
2208             REQUIRE_FEN;
2209             gen_cmpteq(ctx, ra, rb, rc, fn11);
2210             break;
2211         case 0x26:
2212             /* CMPTLT */
2213             REQUIRE_FEN;
2214             gen_cmptlt(ctx, ra, rb, rc, fn11);
2215             break;
2216         case 0x27:
2217             /* CMPTLE */
2218             REQUIRE_FEN;
2219             gen_cmptle(ctx, ra, rb, rc, fn11);
2220             break;
2221         case 0x2C:
2222             REQUIRE_REG_31(ra);
2223             REQUIRE_FEN;
2224             if (fn11 == 0x2AC || fn11 == 0x6AC) {
2225                 /* CVTST */
2226                 gen_cvtst(ctx, rb, rc, fn11);
2227             } else {
2228                 /* CVTTS */
2229                 gen_cvtts(ctx, rb, rc, fn11);
2230             }
2231             break;
2232         case 0x2F:
2233             /* CVTTQ */
2234             REQUIRE_REG_31(ra);
2235             REQUIRE_FEN;
2236             gen_cvttq(ctx, rb, rc, fn11);
2237             break;
2238         case 0x3C:
2239             /* CVTQS */
2240             REQUIRE_REG_31(ra);
2241             REQUIRE_FEN;
2242             gen_cvtqs(ctx, rb, rc, fn11);
2243             break;
2244         case 0x3E:
2245             /* CVTQT */
2246             REQUIRE_REG_31(ra);
2247             REQUIRE_FEN;
2248             gen_cvtqt(ctx, rb, rc, fn11);
2249             break;
2250         default:
2251             goto invalid_opc;
2252         }
2253         break;
2254 
2255     case 0x17:
2256         switch (fn11) {
2257         case 0x010:
2258             /* CVTLQ */
2259             REQUIRE_REG_31(ra);
2260             REQUIRE_FEN;
2261             vc = dest_fpr(ctx, rc);
2262             vb = load_fpr(ctx, rb);
2263             gen_cvtlq(vc, vb);
2264             break;
2265         case 0x020:
2266             /* CPYS */
2267             REQUIRE_FEN;
2268             if (rc == 31) {
2269                 /* Special case CPYS as FNOP.  */
2270             } else {
2271                 vc = dest_fpr(ctx, rc);
2272                 va = load_fpr(ctx, ra);
2273                 if (ra == rb) {
2274                     /* Special case CPYS as FMOV.  */
2275                     tcg_gen_mov_i64(vc, va);
2276                 } else {
2277                     vb = load_fpr(ctx, rb);
2278                     gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
2279                 }
2280             }
2281             break;
2282         case 0x021:
2283             /* CPYSN */
2284             REQUIRE_FEN;
2285             vc = dest_fpr(ctx, rc);
2286             vb = load_fpr(ctx, rb);
2287             va = load_fpr(ctx, ra);
2288             gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
2289             break;
2290         case 0x022:
2291             /* CPYSE */
2292             REQUIRE_FEN;
2293             vc = dest_fpr(ctx, rc);
2294             vb = load_fpr(ctx, rb);
2295             va = load_fpr(ctx, ra);
2296             gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
2297             break;
2298         case 0x024:
2299             /* MT_FPCR */
2300             REQUIRE_FEN;
2301             va = load_fpr(ctx, ra);
2302             gen_helper_store_fpcr(cpu_env, va);
2303             if (ctx->tb_rm == QUAL_RM_D) {
2304                 /* Re-do the copy of the rounding mode to fp_status
2305                    the next time we use dynamic rounding.  */
2306                 ctx->tb_rm = -1;
2307             }
2308             break;
2309         case 0x025:
2310             /* MF_FPCR */
2311             REQUIRE_FEN;
2312             va = dest_fpr(ctx, ra);
2313             gen_helper_load_fpcr(va, cpu_env);
2314             break;
2315         case 0x02A:
2316             /* FCMOVEQ */
2317             REQUIRE_FEN;
2318             gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
2319             break;
2320         case 0x02B:
2321             /* FCMOVNE */
2322             REQUIRE_FEN;
2323             gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
2324             break;
2325         case 0x02C:
2326             /* FCMOVLT */
2327             REQUIRE_FEN;
2328             gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
2329             break;
2330         case 0x02D:
2331             /* FCMOVGE */
2332             REQUIRE_FEN;
2333             gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
2334             break;
2335         case 0x02E:
2336             /* FCMOVLE */
2337             REQUIRE_FEN;
2338             gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
2339             break;
2340         case 0x02F:
2341             /* FCMOVGT */
2342             REQUIRE_FEN;
2343             gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
2344             break;
2345         case 0x030: /* CVTQL */
2346         case 0x130: /* CVTQL/V */
2347         case 0x530: /* CVTQL/SV */
2348             REQUIRE_REG_31(ra);
2349             REQUIRE_FEN;
2350             vc = dest_fpr(ctx, rc);
2351             vb = load_fpr(ctx, rb);
2352             gen_helper_cvtql(vc, cpu_env, vb);
2353             gen_fp_exc_raise(rc, fn11);
2354             break;
2355         default:
2356             goto invalid_opc;
2357         }
2358         break;
2359 
2360     case 0x18:
2361         switch ((uint16_t)disp16) {
2362         case 0x0000:
2363             /* TRAPB */
2364             /* No-op.  */
2365             break;
2366         case 0x0400:
2367             /* EXCB */
2368             /* No-op.  */
2369             break;
2370         case 0x4000:
2371             /* MB */
2372             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2373             break;
2374         case 0x4400:
2375             /* WMB */
2376             tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
2377             break;
2378         case 0x8000:
2379             /* FETCH */
2380             /* No-op */
2381             break;
2382         case 0xA000:
2383             /* FETCH_M */
2384             /* No-op */
2385             break;
2386         case 0xC000:
2387             /* RPCC */
2388             va = dest_gpr(ctx, ra);
2389             if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2390                 gen_io_start();
2391                 gen_helper_load_pcc(va, cpu_env);
2392                 ret = DISAS_PC_STALE;
2393             } else {
2394                 gen_helper_load_pcc(va, cpu_env);
2395             }
2396             break;
2397         case 0xE000:
2398             /* RC */
2399             gen_rx(ctx, ra, 0);
2400             break;
2401         case 0xE800:
2402             /* ECB */
2403             break;
2404         case 0xF000:
2405             /* RS */
2406             gen_rx(ctx, ra, 1);
2407             break;
2408         case 0xF800:
2409             /* WH64 */
2410             /* No-op */
2411             break;
2412         case 0xFC00:
2413             /* WH64EN */
2414             /* No-op */
2415             break;
2416         default:
2417             goto invalid_opc;
2418         }
2419         break;
2420 
2421     case 0x19:
2422         /* HW_MFPR (PALcode) */
2423 #ifndef CONFIG_USER_ONLY
2424         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2425         va = dest_gpr(ctx, ra);
2426         ret = gen_mfpr(ctx, va, insn & 0xffff);
2427         break;
2428 #else
2429         goto invalid_opc;
2430 #endif
2431 
2432     case 0x1A:
2433         /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
2434            prediction stack action, which of course we don't implement.  */
2435         vb = load_gpr(ctx, rb);
2436         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2437         if (ra != 31) {
2438             tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
2439         }
2440         ret = DISAS_PC_UPDATED;
2441         break;
2442 
2443     case 0x1B:
2444         /* HW_LD (PALcode) */
2445 #ifndef CONFIG_USER_ONLY
2446         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2447         {
2448             TCGv addr = tcg_temp_new();
2449             vb = load_gpr(ctx, rb);
2450             va = dest_gpr(ctx, ra);
2451 
2452             tcg_gen_addi_i64(addr, vb, disp12);
2453             switch ((insn >> 12) & 0xF) {
2454             case 0x0:
2455                 /* Longword physical access (hw_ldl/p) */
2456                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
2457                 break;
2458             case 0x1:
2459                 /* Quadword physical access (hw_ldq/p) */
2460                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
2461                 break;
2462             case 0x2:
2463                 /* Longword physical access with lock (hw_ldl_l/p) */
2464                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
2465                 tcg_gen_mov_i64(cpu_lock_addr, addr);
2466                 tcg_gen_mov_i64(cpu_lock_value, va);
2467                 break;
2468             case 0x3:
2469                 /* Quadword physical access with lock (hw_ldq_l/p) */
2470                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
2471                 tcg_gen_mov_i64(cpu_lock_addr, addr);
2472                 tcg_gen_mov_i64(cpu_lock_value, va);
2473                 break;
2474             case 0x4:
2475                 /* Longword virtual PTE fetch (hw_ldl/v) */
2476                 goto invalid_opc;
2477             case 0x5:
2478                 /* Quadword virtual PTE fetch (hw_ldq/v) */
2479                 goto invalid_opc;
2480                 break;
2481             case 0x6:
2482                 /* Invalid */
2483                 goto invalid_opc;
2484             case 0x7:
2485                 /* Invalid */
2486                 goto invalid_opc;
2487             case 0x8:
2488                 /* Longword virtual access (hw_ldl) */
2489                 goto invalid_opc;
2490             case 0x9:
2491                 /* Quadword virtual access (hw_ldq) */
2492                 goto invalid_opc;
2493             case 0xA:
2494                 /* Longword virtual access with protection check (hw_ldl/w) */
2495                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
2496                 break;
2497             case 0xB:
2498                 /* Quadword virtual access with protection check (hw_ldq/w) */
2499                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
2500                 break;
2501             case 0xC:
2502                 /* Longword virtual access with alt access mode (hw_ldl/a)*/
2503                 goto invalid_opc;
2504             case 0xD:
2505                 /* Quadword virtual access with alt access mode (hw_ldq/a) */
2506                 goto invalid_opc;
2507             case 0xE:
2508                 /* Longword virtual access with alternate access mode and
2509                    protection checks (hw_ldl/wa) */
2510                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
2511                 break;
2512             case 0xF:
2513                 /* Quadword virtual access with alternate access mode and
2514                    protection checks (hw_ldq/wa) */
2515                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
2516                 break;
2517             }
2518             tcg_temp_free(addr);
2519             break;
2520         }
2521 #else
2522         goto invalid_opc;
2523 #endif
2524 
2525     case 0x1C:
2526         vc = dest_gpr(ctx, rc);
2527         if (fn7 == 0x70) {
2528             /* FTOIT */
2529             REQUIRE_AMASK(FIX);
2530             REQUIRE_REG_31(rb);
2531             va = load_fpr(ctx, ra);
2532             tcg_gen_mov_i64(vc, va);
2533             break;
2534         } else if (fn7 == 0x78) {
2535             /* FTOIS */
2536             REQUIRE_AMASK(FIX);
2537             REQUIRE_REG_31(rb);
2538             t32 = tcg_temp_new_i32();
2539             va = load_fpr(ctx, ra);
2540             gen_helper_s_to_memory(t32, va);
2541             tcg_gen_ext_i32_i64(vc, t32);
2542             tcg_temp_free_i32(t32);
2543             break;
2544         }
2545 
2546         vb = load_gpr_lit(ctx, rb, lit, islit);
2547         switch (fn7) {
2548         case 0x00:
2549             /* SEXTB */
2550             REQUIRE_AMASK(BWX);
2551             REQUIRE_REG_31(ra);
2552             tcg_gen_ext8s_i64(vc, vb);
2553             break;
2554         case 0x01:
2555             /* SEXTW */
2556             REQUIRE_AMASK(BWX);
2557             REQUIRE_REG_31(ra);
2558             tcg_gen_ext16s_i64(vc, vb);
2559             break;
2560         case 0x30:
2561             /* CTPOP */
2562             REQUIRE_AMASK(CIX);
2563             REQUIRE_REG_31(ra);
2564             REQUIRE_NO_LIT;
2565             tcg_gen_ctpop_i64(vc, vb);
2566             break;
2567         case 0x31:
2568             /* PERR */
2569             REQUIRE_AMASK(MVI);
2570             REQUIRE_NO_LIT;
2571             va = load_gpr(ctx, ra);
2572             gen_helper_perr(vc, va, vb);
2573             break;
2574         case 0x32:
2575             /* CTLZ */
2576             REQUIRE_AMASK(CIX);
2577             REQUIRE_REG_31(ra);
2578             REQUIRE_NO_LIT;
2579             tcg_gen_clzi_i64(vc, vb, 64);
2580             break;
2581         case 0x33:
2582             /* CTTZ */
2583             REQUIRE_AMASK(CIX);
2584             REQUIRE_REG_31(ra);
2585             REQUIRE_NO_LIT;
2586             tcg_gen_ctzi_i64(vc, vb, 64);
2587             break;
2588         case 0x34:
2589             /* UNPKBW */
2590             REQUIRE_AMASK(MVI);
2591             REQUIRE_REG_31(ra);
2592             REQUIRE_NO_LIT;
2593             gen_helper_unpkbw(vc, vb);
2594             break;
2595         case 0x35:
2596             /* UNPKBL */
2597             REQUIRE_AMASK(MVI);
2598             REQUIRE_REG_31(ra);
2599             REQUIRE_NO_LIT;
2600             gen_helper_unpkbl(vc, vb);
2601             break;
2602         case 0x36:
2603             /* PKWB */
2604             REQUIRE_AMASK(MVI);
2605             REQUIRE_REG_31(ra);
2606             REQUIRE_NO_LIT;
2607             gen_helper_pkwb(vc, vb);
2608             break;
2609         case 0x37:
2610             /* PKLB */
2611             REQUIRE_AMASK(MVI);
2612             REQUIRE_REG_31(ra);
2613             REQUIRE_NO_LIT;
2614             gen_helper_pklb(vc, vb);
2615             break;
2616         case 0x38:
2617             /* MINSB8 */
2618             REQUIRE_AMASK(MVI);
2619             va = load_gpr(ctx, ra);
2620             gen_helper_minsb8(vc, va, vb);
2621             break;
2622         case 0x39:
2623             /* MINSW4 */
2624             REQUIRE_AMASK(MVI);
2625             va = load_gpr(ctx, ra);
2626             gen_helper_minsw4(vc, va, vb);
2627             break;
2628         case 0x3A:
2629             /* MINUB8 */
2630             REQUIRE_AMASK(MVI);
2631             va = load_gpr(ctx, ra);
2632             gen_helper_minub8(vc, va, vb);
2633             break;
2634         case 0x3B:
2635             /* MINUW4 */
2636             REQUIRE_AMASK(MVI);
2637             va = load_gpr(ctx, ra);
2638             gen_helper_minuw4(vc, va, vb);
2639             break;
2640         case 0x3C:
2641             /* MAXUB8 */
2642             REQUIRE_AMASK(MVI);
2643             va = load_gpr(ctx, ra);
2644             gen_helper_maxub8(vc, va, vb);
2645             break;
2646         case 0x3D:
2647             /* MAXUW4 */
2648             REQUIRE_AMASK(MVI);
2649             va = load_gpr(ctx, ra);
2650             gen_helper_maxuw4(vc, va, vb);
2651             break;
2652         case 0x3E:
2653             /* MAXSB8 */
2654             REQUIRE_AMASK(MVI);
2655             va = load_gpr(ctx, ra);
2656             gen_helper_maxsb8(vc, va, vb);
2657             break;
2658         case 0x3F:
2659             /* MAXSW4 */
2660             REQUIRE_AMASK(MVI);
2661             va = load_gpr(ctx, ra);
2662             gen_helper_maxsw4(vc, va, vb);
2663             break;
2664         default:
2665             goto invalid_opc;
2666         }
2667         break;
2668 
2669     case 0x1D:
2670         /* HW_MTPR (PALcode) */
2671 #ifndef CONFIG_USER_ONLY
2672         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2673         vb = load_gpr(ctx, rb);
2674         ret = gen_mtpr(ctx, vb, insn & 0xffff);
2675         break;
2676 #else
2677         goto invalid_opc;
2678 #endif
2679 
2680     case 0x1E:
2681         /* HW_RET (PALcode) */
2682 #ifndef CONFIG_USER_ONLY
2683         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2684         if (rb == 31) {
2685             /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
2686                address from EXC_ADDR.  This turns out to be useful for our
2687                emulation PALcode, so continue to accept it.  */
2688             vb = dest_sink(ctx);
2689             tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
2690         } else {
2691             vb = load_gpr(ctx, rb);
2692         }
2693         tcg_gen_movi_i64(cpu_lock_addr, -1);
2694         st_flag_byte(load_zero(ctx), ENV_FLAG_RX_SHIFT);
2695         tmp = tcg_temp_new();
2696         tcg_gen_andi_i64(tmp, vb, 1);
2697         st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
2698         tcg_temp_free(tmp);
2699         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2700         /* Allow interrupts to be recognized right away.  */
2701         ret = DISAS_PC_UPDATED_NOCHAIN;
2702         break;
2703 #else
2704         goto invalid_opc;
2705 #endif
2706 
2707     case 0x1F:
2708         /* HW_ST (PALcode) */
2709 #ifndef CONFIG_USER_ONLY
2710         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2711         {
2712             switch ((insn >> 12) & 0xF) {
2713             case 0x0:
2714                 /* Longword physical access */
2715                 va = load_gpr(ctx, ra);
2716                 vb = load_gpr(ctx, rb);
2717                 tmp = tcg_temp_new();
2718                 tcg_gen_addi_i64(tmp, vb, disp12);
2719                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
2720                 tcg_temp_free(tmp);
2721                 break;
2722             case 0x1:
2723                 /* Quadword physical access */
2724                 va = load_gpr(ctx, ra);
2725                 vb = load_gpr(ctx, rb);
2726                 tmp = tcg_temp_new();
2727                 tcg_gen_addi_i64(tmp, vb, disp12);
2728                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
2729                 tcg_temp_free(tmp);
2730                 break;
2731             case 0x2:
2732                 /* Longword physical access with lock */
2733                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2734                                             MMU_PHYS_IDX, MO_LESL);
2735                 break;
2736             case 0x3:
2737                 /* Quadword physical access with lock */
2738                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2739                                             MMU_PHYS_IDX, MO_LEQ);
2740                 break;
2741             case 0x4:
2742                 /* Longword virtual access */
2743                 goto invalid_opc;
2744             case 0x5:
2745                 /* Quadword virtual access */
2746                 goto invalid_opc;
2747             case 0x6:
2748                 /* Invalid */
2749                 goto invalid_opc;
2750             case 0x7:
2751                 /* Invalid */
2752                 goto invalid_opc;
2753             case 0x8:
2754                 /* Invalid */
2755                 goto invalid_opc;
2756             case 0x9:
2757                 /* Invalid */
2758                 goto invalid_opc;
2759             case 0xA:
2760                 /* Invalid */
2761                 goto invalid_opc;
2762             case 0xB:
2763                 /* Invalid */
2764                 goto invalid_opc;
2765             case 0xC:
2766                 /* Longword virtual access with alternate access mode */
2767                 goto invalid_opc;
2768             case 0xD:
2769                 /* Quadword virtual access with alternate access mode */
2770                 goto invalid_opc;
2771             case 0xE:
2772                 /* Invalid */
2773                 goto invalid_opc;
2774             case 0xF:
2775                 /* Invalid */
2776                 goto invalid_opc;
2777             }
2778             break;
2779         }
2780 #else
2781         goto invalid_opc;
2782 #endif
2783     case 0x20:
2784         /* LDF */
2785         REQUIRE_FEN;
2786         gen_load_fp(ctx, ra, rb, disp16, gen_ldf);
2787         break;
2788     case 0x21:
2789         /* LDG */
2790         REQUIRE_FEN;
2791         gen_load_fp(ctx, ra, rb, disp16, gen_ldg);
2792         break;
2793     case 0x22:
2794         /* LDS */
2795         REQUIRE_FEN;
2796         gen_load_fp(ctx, ra, rb, disp16, gen_lds);
2797         break;
2798     case 0x23:
2799         /* LDT */
2800         REQUIRE_FEN;
2801         gen_load_fp(ctx, ra, rb, disp16, gen_ldt);
2802         break;
2803     case 0x24:
2804         /* STF */
2805         REQUIRE_FEN;
2806         gen_store_fp(ctx, ra, rb, disp16, gen_stf);
2807         break;
2808     case 0x25:
2809         /* STG */
2810         REQUIRE_FEN;
2811         gen_store_fp(ctx, ra, rb, disp16, gen_stg);
2812         break;
2813     case 0x26:
2814         /* STS */
2815         REQUIRE_FEN;
2816         gen_store_fp(ctx, ra, rb, disp16, gen_sts);
2817         break;
2818     case 0x27:
2819         /* STT */
2820         REQUIRE_FEN;
2821         gen_store_fp(ctx, ra, rb, disp16, gen_stt);
2822         break;
2823     case 0x28:
2824         /* LDL */
2825         gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 0);
2826         break;
2827     case 0x29:
2828         /* LDQ */
2829         gen_load_int(ctx, ra, rb, disp16, MO_LEQ, 0, 0);
2830         break;
2831     case 0x2A:
2832         /* LDL_L */
2833         gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 1);
2834         break;
2835     case 0x2B:
2836         /* LDQ_L */
2837         gen_load_int(ctx, ra, rb, disp16, MO_LEQ, 0, 1);
2838         break;
2839     case 0x2C:
2840         /* STL */
2841         gen_store_int(ctx, ra, rb, disp16, MO_LEUL, 0);
2842         break;
2843     case 0x2D:
2844         /* STQ */
2845         gen_store_int(ctx, ra, rb, disp16, MO_LEQ, 0);
2846         break;
2847     case 0x2E:
2848         /* STL_C */
2849         ret = gen_store_conditional(ctx, ra, rb, disp16,
2850                                     ctx->mem_idx, MO_LESL);
2851         break;
2852     case 0x2F:
2853         /* STQ_C */
2854         ret = gen_store_conditional(ctx, ra, rb, disp16,
2855                                     ctx->mem_idx, MO_LEQ);
2856         break;
2857     case 0x30:
2858         /* BR */
2859         ret = gen_bdirect(ctx, ra, disp21);
2860         break;
2861     case 0x31: /* FBEQ */
2862         REQUIRE_FEN;
2863         ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
2864         break;
2865     case 0x32: /* FBLT */
2866         REQUIRE_FEN;
2867         ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
2868         break;
2869     case 0x33: /* FBLE */
2870         REQUIRE_FEN;
2871         ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
2872         break;
2873     case 0x34:
2874         /* BSR */
2875         ret = gen_bdirect(ctx, ra, disp21);
2876         break;
2877     case 0x35: /* FBNE */
2878         REQUIRE_FEN;
2879         ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
2880         break;
2881     case 0x36: /* FBGE */
2882         REQUIRE_FEN;
2883         ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
2884         break;
2885     case 0x37: /* FBGT */
2886         REQUIRE_FEN;
2887         ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
2888         break;
2889     case 0x38:
2890         /* BLBC */
2891         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2892         break;
2893     case 0x39:
2894         /* BEQ */
2895         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2896         break;
2897     case 0x3A:
2898         /* BLT */
2899         ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2900         break;
2901     case 0x3B:
2902         /* BLE */
2903         ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2904         break;
2905     case 0x3C:
2906         /* BLBS */
2907         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2908         break;
2909     case 0x3D:
2910         /* BNE */
2911         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2912         break;
2913     case 0x3E:
2914         /* BGE */
2915         ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2916         break;
2917     case 0x3F:
2918         /* BGT */
2919         ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2920         break;
2921     invalid_opc:
2922         ret = gen_invalid(ctx);
2923         break;
2924     raise_fen:
2925         ret = gen_excp(ctx, EXCP_FEN, 0);
2926         break;
2927     }
2928 
2929     return ret;
2930 }
2931 
/*
 * Initialize the per-TB translation context from the CPU state.
 * Called once by the generic translator loop before any insn is
 * translated; everything latched here must stay constant for the
 * whole TB so that the generated code matches tb->flags.
 */
static void alpha_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    int64_t bound;

    /* Snapshot the state that selects this TB: flags, mmu index,
       and the CPU's implementation version / architecture mask.  */
    ctx->tbflags = ctx->base.tb->flags;
    ctx->mem_idx = cpu_mmu_index(env, false);
    ctx->implver = env->implver;
    ctx->amask = env->amask;

#ifdef CONFIG_USER_ONLY
    ctx->ir = cpu_std_ir;
#else
    ctx->palbr = env->palbr;
    /* In PALmode a subset of the integer registers is shadowed;
       pick the register map that matches the mode of this TB.  */
    ctx->ir = (ctx->tbflags & ENV_FLAG_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
#endif

    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamically figure out
       what default would be most efficient given the running program.  */
    ctx->tb_rm = -1;
    /* Similarly for flush-to-zero.  */
    ctx->tb_ftz = -1;

    /* Lazily-allocated temporaries; see load_zero/dest_sink.  */
    ctx->zero = NULL;
    ctx->sink = NULL;

    /* Bound the number of insns to execute to those left on the page.  */
    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}
2967 
/* TranslatorOps hook: Alpha needs no per-TB prologue, so this is a no-op.  */
static void alpha_tr_tb_start(DisasContextBase *db, CPUState *cpu)
{
}
2971 
2972 static void alpha_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
2973 {
2974     tcg_gen_insn_start(dcbase->pc_next);
2975 }
2976 
2977 static void alpha_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
2978 {
2979     DisasContext *ctx = container_of(dcbase, DisasContext, base);
2980     CPUAlphaState *env = cpu->env_ptr;
2981     uint32_t insn = translator_ldl(env, &ctx->base, ctx->base.pc_next);
2982 
2983     ctx->base.pc_next += 4;
2984     ctx->base.is_jmp = translate_one(ctx, insn);
2985 
2986     free_context_temps(ctx);
2987     translator_loop_temp_check(&ctx->base);
2988 }
2989 
/*
 * TranslatorOps hook: emit the TB epilogue according to why translation
 * stopped.  The cases deliberately fall through from the most to the
 * least optimized exit: direct goto_tb chaining, indirect lookup-and-goto,
 * and finally a plain exit (or EXCP_DEBUG when single-stepping).
 */
static void alpha_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    switch (ctx->base.is_jmp) {
    case DISAS_NORETURN:
        /* The insn already ended the TB (exception, branch); nothing to do.  */
        break;
    case DISAS_TOO_MANY:
        /* Hit the insn limit at a known PC: chain directly if the
           target is on the same page, else fall through and update PC.  */
        if (use_goto_tb(ctx, ctx->base.pc_next)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            tcg_gen_exit_tb(ctx->base.tb, 0);
        }
        /* FALLTHRU */
    case DISAS_PC_STALE:
        /* cpu_pc does not yet hold the next PC; store it now.  */
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        /* FALLTHRU */
    case DISAS_PC_UPDATED:
        /* PC is in cpu_pc: try the indirect TB lookup unless
           single-stepping, which must return to the main loop.  */
        if (!ctx->base.singlestep_enabled) {
            tcg_gen_lookup_and_goto_ptr();
            break;
        }
        /* FALLTHRU */
    case DISAS_PC_UPDATED_NOCHAIN:
        /* No chaining allowed: raise the debug exception when
           single-stepping, otherwise exit to the main loop.  */
        if (ctx->base.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
3024 
3025 static void alpha_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
3026 {
3027     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
3028     log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
3029 }
3030 
/* Hooks plugged into the generic translator loop (exec/translator.h).  */
static const TranslatorOps alpha_tr_ops = {
    .init_disas_context = alpha_tr_init_disas_context,
    .tb_start           = alpha_tr_tb_start,
    .insn_start         = alpha_tr_insn_start,
    .translate_insn     = alpha_tr_translate_insn,
    .tb_stop            = alpha_tr_tb_stop,
    .disas_log          = alpha_tr_disas_log,
};
3039 
3040 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
3041 {
3042     DisasContext dc;
3043     translator_loop(&alpha_tr_ops, &dc.base, cpu, tb, max_insns);
3044 }
3045 
/*
 * Restore CPU state for an exception taken mid-TB.  data[] holds the
 * values recorded by tcg_gen_insn_start for the faulting insn; for
 * Alpha that is just the guest PC (see alpha_tr_insn_start).
 */
void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}
3051