xref: /qemu/target/sparc/translate.c (revision ebda3036)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 #define HELPER_H "helper.h"
37 #include "exec/helper-info.c.inc"
38 #undef  HELPER_H
39 
40 /* Dynamic PC, must exit to main loop. */
41 #define DYNAMIC_PC         1
42 /* Dynamic PC, one of two values according to jump_pc[T2]. */
43 #define JUMP_PC            2
44 /* Dynamic PC, may lookup next TB. */
45 #define DYNAMIC_PC_LOOKUP  3
46 
47 #define DISAS_EXIT  DISAS_TARGET_0
48 
49 /* global register indexes */
50 static TCGv_ptr cpu_regwptr;
51 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
52 static TCGv_i32 cpu_cc_op;
53 static TCGv_i32 cpu_psr;
54 static TCGv cpu_fsr, cpu_pc, cpu_npc;
55 static TCGv cpu_regs[32];
56 static TCGv cpu_y;
57 #ifndef CONFIG_USER_ONLY
58 static TCGv cpu_tbr;
59 #endif
60 static TCGv cpu_cond;
61 #ifdef TARGET_SPARC64
62 static TCGv_i32 cpu_xcc, cpu_fprs;
63 static TCGv cpu_gsr;
64 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
65 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
66 #else
67 static TCGv cpu_wim;
68 #endif
69 /* Floating point registers */
70 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
71 
/* Per-TB translation state for the SPARC front end.  */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* index used for guest memory accesses */
    bool fpu_enabled;        /* FPU access currently permitted */
    bool address_mask_32bit; /* mask effective addresses to 32 bits */
#ifndef CONFIG_USER_ONLY
    bool supervisor;         /* translating supervisor-mode code */
#ifdef TARGET_SPARC64
    bool hypervisor;         /* translating hypervisor-mode code */
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;         /* ASI field of the current insn — see uses below */
#endif
} DisasContext;

/* A comparison ready for brcond/movcond: COND applied to (c1, c2).
   When is_bool is set, c1 already holds a 0/1 value and c2 is zero.  */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
100 
101 // This function uses non-native bit order
102 #define GET_FIELD(X, FROM, TO)                                  \
103     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
104 
105 // This function uses the order in the manuals, i.e. bit 0 is 2^0
106 #define GET_FIELD_SP(X, FROM, TO)               \
107     GET_FIELD(X, 31 - (TO), 31 - (FROM))
108 
109 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
110 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
111 
112 #ifdef TARGET_SPARC64
113 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
114 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
115 #else
116 #define DFPREG(r) (r & 0x1e)
117 #define QFPREG(r) (r & 0x1c)
118 #endif
119 
120 #define UA2005_HTRAP_MASK 0xff
121 #define V8_TRAP_MASK 0x7f
122 
/*
 * Sign-extend the low LEN bits of X to a full int.
 * Valid for 1 <= len <= 32.
 *
 * The shift is performed on an unsigned type: left-shifting a negative
 * value is undefined behavior in C.  The final arithmetic right shift of
 * a negative value is implementation-defined but is arithmetic on all
 * compilers QEMU supports.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((uint32_t)x << len) >> len;
}
128 
129 #define IS_IMM (insn & (1<<13))
130 
/* Record in FPRS that the FP bank containing register RD was written:
   rd < 32 sets bit 1, rd >= 32 sets bit 2.  No-op on 32-bit SPARC.  */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
143 
144 /* floating point registers moves */
145 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
146 {
147     TCGv_i32 ret = tcg_temp_new_i32();
148     if (src & 1) {
149         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
150     } else {
151         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
152     }
153     return ret;
154 }
155 
156 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
157 {
158     TCGv_i64 t = tcg_temp_new_i64();
159 
160     tcg_gen_extu_i32_i64(t, v);
161     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
162                         (dst & 1 ? 0 : 32), 32);
163     gen_update_fprs_dirty(dc, dst);
164 }
165 
/* Return a fresh temp to receive a single-precision result.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}

/* Read double-precision register SRC; returns the backing i64 global
   itself, so callers must not clobber it except as a destination.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

/* Write V to double-precision register DST and mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Return the backing i64 for double-precision destination DST.
   NOTE: the caller remains responsible for gen_update_fprs_dirty.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
188 
/* Copy quad-precision register SRC into the env scratch slot qt0.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy quad-precision register SRC into the env scratch slot qt1.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the env scratch slot qt0 back into quad register DST.
   NOTE: FPRS dirtying is left to the caller.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
212 
213 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
214                             TCGv_i64 v1, TCGv_i64 v2)
215 {
216     dst = QFPREG(dst);
217 
218     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
219     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
220     gen_update_fprs_dirty(dc, dst);
221 }
222 
223 #ifdef TARGET_SPARC64
/* Return the i64 backing the upper half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the i64 backing the lower half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register RS to quad register RD and mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
245 #endif
246 
247 /* moves */
248 #ifdef CONFIG_USER_ONLY
249 #define supervisor(dc) 0
250 #ifdef TARGET_SPARC64
251 #define hypervisor(dc) 0
252 #endif
253 #else
254 #ifdef TARGET_SPARC64
255 #define hypervisor(dc) (dc->hypervisor)
256 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
257 #else
258 #define supervisor(dc) (dc->supervisor)
259 #endif
260 #endif
261 
262 #ifdef TARGET_SPARC64
263 #ifndef TARGET_ABI32
264 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
265 #else
266 #define AM_CHECK(dc) (1)
267 #endif
268 #endif
269 
270 static void gen_address_mask(DisasContext *dc, TCGv addr)
271 {
272 #ifdef TARGET_SPARC64
273     if (AM_CHECK(dc))
274         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
275 #endif
276 }
277 
278 static TCGv gen_load_gpr(DisasContext *dc, int reg)
279 {
280     if (reg > 0) {
281         assert(reg < 32);
282         return cpu_regs[reg];
283     } else {
284         TCGv t = tcg_temp_new();
285         tcg_gen_movi_tl(t, 0);
286         return t;
287     }
288 }
289 
290 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
291 {
292     if (reg > 0) {
293         assert(reg < 32);
294         tcg_gen_mov_tl(cpu_regs[reg], v);
295     }
296 }
297 
298 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
299 {
300     if (reg > 0) {
301         assert(reg < 32);
302         return cpu_regs[reg];
303     } else {
304         return tcg_temp_new();
305     }
306 }
307 
308 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
309 {
310     return translator_use_goto_tb(&s->base, pc) &&
311            translator_use_goto_tb(&s->base, npc);
312 }
313 
/* End the TB by transferring control to (PC, NPC), preferring a direct
   goto_tb chain and falling back to a TB lookup on the stored pc/npc.  */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
330 
/* Extract one condition-code flag from the 32-bit PSR image SRC into
   REG as 0 or 1.  XXX suboptimal: one extract per flag queried.  */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

/* As above, for the Zero flag.  */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

/* As above, for the Overflow flag.  */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

/* As above, for the Carry flag.  */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
355 
/* dst = src1 + src2, latching the operands and the result into
   cpu_cc_* so the flags can be computed lazily later.  */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
363 
/* Recover the 32-bit carry out of a previous add latched in cpu_cc_*,
   as a 0/1 i32 value.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}

/* Recover the 32-bit borrow out of a previous subtract latched in
   cpu_cc_*, as a 0/1 i32 value.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
405 
/* dst = src1 + src2 + carry (ADDX/ADDC), choosing the cheapest way to
   materialize the carry based on how the flags were last produced
   (dc->cc_op).  When UPDATE_CC is set, also latch operands/result and
   switch the lazy flag state to CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 carry to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
470 
/* dst = src1 - src2, latching the operands and the result into
   cpu_cc_* so the flags can be computed lazily later.  */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
478 
/* dst = src1 - src2 - carry (SUBX/SUBC), mirroring gen_op_addx_int:
   the borrow is materialized in the cheapest way given dc->cc_op.
   When UPDATE_CC is set, latch operands/result and switch the lazy
   flag state to CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 carry to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
543 
/* Emit one multiply-step (MULScc): conditionally add src2 based on
   Y<0>, shift Y and the partial product by one, and latch operands
   and result for lazy flag computation.  The inline comments give the
   original interpreter code being reproduced.  */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
582 
/* 32x32 -> 64-bit multiply of the truncated operands.  DST receives
   the product (all 64 bits on sparc64, the low 32 on sparc32) and %y
   receives the high 32 bits.  SIGN_EXT selects signed vs unsigned.  */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* Here TCGv is 64-bit, so the i64 ops apply directly.  */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
607 
/* UMUL: unsigned 32x32 multiply.  */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply.  */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
619 
/*
 * Evaluate one icc/xcc condition from the PSR image SRC into DST as
 * 0 or 1.  The comment before each helper gives the condition in
 * terms of the N/Z/V/C flags.
 */

// 1
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
733 
734 /*
735   FPSR bit field FCC1 | FCC0:
736    0 =
737    1 <
738    2 >
739    3 unordered
740 */
/* Extract FCC0 of the fcc field selected by FCC_OFFSET from the FSR
   image SRC into REG as 0 or 1.  */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* As above, for FCC1.  */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/*
 * Evaluate one floating-point condition into DST as 0 or 1.  The
 * comment before each helper lists the fcc values (see the table
 * above) for which the condition holds.
 */

// !0: FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
874 
/* End the TB with a two-way exit: continue at PC1 when R_COND is
   nonzero, else at PC2, each with the following insn as npc.  */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
887 
/* Conditional branch with the annul bit set: when cpu_cond is nonzero,
   execute the delay slot at npc and continue at PC1; otherwise skip
   the delay slot and continue at npc + 4.  Always ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
902 
/* Non-annulled conditional branch to PC1: the delay slot at npc is
   always executed, and the npc after it is PC1 when cpu_cond is
   nonzero, npc + 4 otherwise.  With a static npc the decision is
   deferred via the JUMP_PC state; a dynamic npc is resolved with a
   movcond immediately.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
928 
929 static void gen_generic_branch(DisasContext *dc)
930 {
931     TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
932     TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
933     TCGv zero = tcg_constant_tl(0);
934 
935     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
936 }
937 
938 /* call this function before using the condition register as it may
939    have been set for a jump */
/* Resolve a pending JUMP_PC into cpu_npc so cpu_cond may be reused;
   see the comment above.  */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
947 
/* Make cpu_npc hold the correct next-PC value: materialize a static
   npc, or resolve a pending JUMP_PC; dynamic npc is already there.  */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc already holds the value.  */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
966 
967 static void update_psr(DisasContext *dc)
968 {
969     if (dc->cc_op != CC_OP_FLAGS) {
970         dc->cc_op = CC_OP_FLAGS;
971         gen_helper_compute_psr(cpu_env);
972     }
973 }
974 
/* Flush the static pc/npc into cpu_pc/cpu_npc, e.g. before code that
   may raise an exception.  */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
980 
/* Raise exception WHICH at the current pc/npc and end the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
987 
/* Emit a run-time alignment check of ADDR against MASK.  */
static void gen_check_align(TCGv addr, int mask)
{
    gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
}
992 
/* Advance pc to npc, in both the TCG state and the translator's
   static view.  A pending JUMP_PC is resolved into cpu_npc first.  */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Static npc: no TCG code is needed.  */
        dc->pc = dc->npc;
    }
}
1014 
/* Step to the next insn: pc <- npc, npc <- npc + 4.  */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1020 
1021 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1022                         DisasContext *dc)
1023 {
1024     static int subcc_cond[16] = {
1025         TCG_COND_NEVER,
1026         TCG_COND_EQ,
1027         TCG_COND_LE,
1028         TCG_COND_LT,
1029         TCG_COND_LEU,
1030         TCG_COND_LTU,
1031         -1, /* neg */
1032         -1, /* overflow */
1033         TCG_COND_ALWAYS,
1034         TCG_COND_NE,
1035         TCG_COND_GT,
1036         TCG_COND_GE,
1037         TCG_COND_GTU,
1038         TCG_COND_GEU,
1039         -1, /* pos */
1040         -1, /* no overflow */
1041     };
1042 
1043     static int logic_cond[16] = {
1044         TCG_COND_NEVER,
1045         TCG_COND_EQ,     /* eq:  Z */
1046         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1047         TCG_COND_LT,     /* lt:  N ^ V -> N */
1048         TCG_COND_EQ,     /* leu: C | Z -> Z */
1049         TCG_COND_NEVER,  /* ltu: C -> 0 */
1050         TCG_COND_LT,     /* neg: N */
1051         TCG_COND_NEVER,  /* vs:  V -> 0 */
1052         TCG_COND_ALWAYS,
1053         TCG_COND_NE,     /* ne:  !Z */
1054         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1055         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1056         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1057         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1058         TCG_COND_GE,     /* pos: !N */
1059         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1060     };
1061 
1062     TCGv_i32 r_src;
1063     TCGv r_dst;
1064 
1065 #ifdef TARGET_SPARC64
1066     if (xcc) {
1067         r_src = cpu_xcc;
1068     } else {
1069         r_src = cpu_psr;
1070     }
1071 #else
1072     r_src = cpu_psr;
1073 #endif
1074 
1075     switch (dc->cc_op) {
1076     case CC_OP_LOGIC:
1077         cmp->cond = logic_cond[cond];
1078     do_compare_dst_0:
1079         cmp->is_bool = false;
1080         cmp->c2 = tcg_constant_tl(0);
1081 #ifdef TARGET_SPARC64
1082         if (!xcc) {
1083             cmp->c1 = tcg_temp_new();
1084             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1085             break;
1086         }
1087 #endif
1088         cmp->c1 = cpu_cc_dst;
1089         break;
1090 
1091     case CC_OP_SUB:
1092         switch (cond) {
1093         case 6:  /* neg */
1094         case 14: /* pos */
1095             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1096             goto do_compare_dst_0;
1097 
1098         case 7: /* overflow */
1099         case 15: /* !overflow */
1100             goto do_dynamic;
1101 
1102         default:
1103             cmp->cond = subcc_cond[cond];
1104             cmp->is_bool = false;
1105 #ifdef TARGET_SPARC64
1106             if (!xcc) {
1107                 /* Note that sign-extension works for unsigned compares as
1108                    long as both operands are sign-extended.  */
1109                 cmp->c1 = tcg_temp_new();
1110                 cmp->c2 = tcg_temp_new();
1111                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1112                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1113                 break;
1114             }
1115 #endif
1116             cmp->c1 = cpu_cc_src;
1117             cmp->c2 = cpu_cc_src2;
1118             break;
1119         }
1120         break;
1121 
1122     default:
1123     do_dynamic:
1124         gen_helper_compute_psr(cpu_env);
1125         dc->cc_op = CC_OP_FLAGS;
1126         /* FALLTHRU */
1127 
1128     case CC_OP_FLAGS:
1129         /* We're going to generate a boolean result.  */
1130         cmp->cond = TCG_COND_NE;
1131         cmp->is_bool = true;
1132         cmp->c1 = r_dst = tcg_temp_new();
1133         cmp->c2 = tcg_constant_tl(0);
1134 
1135         switch (cond) {
1136         case 0x0:
1137             gen_op_eval_bn(r_dst);
1138             break;
1139         case 0x1:
1140             gen_op_eval_be(r_dst, r_src);
1141             break;
1142         case 0x2:
1143             gen_op_eval_ble(r_dst, r_src);
1144             break;
1145         case 0x3:
1146             gen_op_eval_bl(r_dst, r_src);
1147             break;
1148         case 0x4:
1149             gen_op_eval_bleu(r_dst, r_src);
1150             break;
1151         case 0x5:
1152             gen_op_eval_bcs(r_dst, r_src);
1153             break;
1154         case 0x6:
1155             gen_op_eval_bneg(r_dst, r_src);
1156             break;
1157         case 0x7:
1158             gen_op_eval_bvs(r_dst, r_src);
1159             break;
1160         case 0x8:
1161             gen_op_eval_ba(r_dst);
1162             break;
1163         case 0x9:
1164             gen_op_eval_bne(r_dst, r_src);
1165             break;
1166         case 0xa:
1167             gen_op_eval_bg(r_dst, r_src);
1168             break;
1169         case 0xb:
1170             gen_op_eval_bge(r_dst, r_src);
1171             break;
1172         case 0xc:
1173             gen_op_eval_bgu(r_dst, r_src);
1174             break;
1175         case 0xd:
1176             gen_op_eval_bcc(r_dst, r_src);
1177             break;
1178         case 0xe:
1179             gen_op_eval_bpos(r_dst, r_src);
1180             break;
1181         case 0xf:
1182             gen_op_eval_bvc(r_dst, r_src);
1183             break;
1184         }
1185         break;
1186     }
1187 }
1188 
/*
 * Evaluate FP condition 'cond' on floating condition-code field 'cc'
 * and fill in *cmp.  The result is always materialized as a boolean
 * temporary compared NE against zero.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Select the FSR fcc field: the eval helpers are written for fcc0,
       so 'offset' is the extra bit shift relative to fcc0's position
       (fcc1/2/3 live at bits 32/34/36 vs. fcc0 at bit 10, hence the
       "N - 10" arithmetic below).  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Dispatch on the 4-bit FP condition code of the instruction.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1267 
1268 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1269                      DisasContext *dc)
1270 {
1271     DisasCompare cmp;
1272     gen_compare(&cmp, cc, cond, dc);
1273 
1274     /* The interface is to return a boolean in r_dst.  */
1275     if (cmp.is_bool) {
1276         tcg_gen_mov_tl(r_dst, cmp.c1);
1277     } else {
1278         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1279     }
1280 }
1281 
1282 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1283 {
1284     DisasCompare cmp;
1285     gen_fcompare(&cmp, cc, cond);
1286 
1287     /* The interface is to return a boolean in r_dst.  */
1288     if (cmp.is_bool) {
1289         tcg_gen_mov_tl(r_dst, cmp.c1);
1290     } else {
1291         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1292     }
1293 }
1294 
1295 #ifdef TARGET_SPARC64
/* Register-vs-zero conditions, stored INVERTED: gen_compare_reg applies
   tcg_invert_cond() to recover the actual test.  Entries 0 and 4 are
   reserved encodings and hold -1 (must not be reached).  */
static const int gen_tcg_cond_reg[8] = {
    -1,              /* 0: reserved */
    TCG_COND_NE,     /* 1: inverse of EQ */
    TCG_COND_GT,     /* 2: inverse of LE */
    TCG_COND_GE,     /* 3: inverse of LT */
    -1,              /* 4: reserved */
    TCG_COND_EQ,     /* 5: inverse of NE */
    TCG_COND_LE,     /* 6: inverse of GT */
    TCG_COND_LT,     /* 7: inverse of GE */
};
1307 
/* Fill in *cmp for a compare of register r_src against zero using
   condition encoding 'cond'.  NOTE(review): cond values 0 and 4 index
   the -1 table entries; callers are presumably expected to reject
   those encodings before reaching here — confirm at call sites.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_constant_tl(0);
}
1315 
1316 static void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1317 {
1318     DisasCompare cmp;
1319     gen_compare_reg(&cmp, cond, r_src);
1320 
1321     /* The interface is to return a boolean in r_dst.  */
1322     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1323 }
1324 #endif
1325 
/*
 * Translate an integer conditional branch (Bicc/BPcc).  'offset' is the
 * already-scaled displacement, 'cc' selects icc/xcc, and the annul bit
 * (bit 29 of the insn) controls whether the delay slot is skipped.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* In 32-bit address-mask mode, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay-slot insn at npc is skipped too.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump straight to the target, no delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate the condition into cpu_cond and emit
           the (possibly annulling) two-way branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1365 
/*
 * Translate an FP conditional branch (FBfcc/FBPfcc).  Same structure as
 * do_branch above, but the condition is evaluated from an FSR fcc field
 * via gen_fcond.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* In 32-bit address-mask mode, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay-slot insn at npc is skipped too.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump straight to the target, no delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate the FP condition into cpu_cond and emit
           the (possibly annulling) two-way branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1405 
1406 #ifdef TARGET_SPARC64
/*
 * Translate a v9 branch-on-register (BPr): compare r_reg against zero
 * with the condition from insn bits 25-27 and branch accordingly.
 * Unlike do_branch there is no unconditional short-circuit; the
 * condition is always evaluated.
 */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* In 32-bit address-mask mode, branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1424 
/* Single-precision FP compare, writing condition codes into FSR field
   fcc[fccno] (v9 has four fcc fields, each with its own helper).  */
static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1442 
/* Double-precision FP compare into FSR field fcc[fccno].  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1460 
/* Quad-precision FP compare into FSR field fcc[fccno]; the operands
   are taken from the QT0/QT1 staging slots loaded by the caller.  */
static void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1478 
/* Single-precision FP compare-with-exception (FCMPEs) into fcc[fccno].  */
static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1496 
/* Double-precision FP compare-with-exception (FCMPEd) into fcc[fccno].  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1514 
/* Quad-precision FP compare-with-exception (FCMPEq) into fcc[fccno];
   operands come from the QT0/QT1 staging slots.  */
static void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1532 
1533 #else
1534 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1539 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1544 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1549 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1554 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1559 
/* Pre-v9 variant: only one fcc field exists, so fccno is ignored.  */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1564 #endif
1565 
/* Raise an FP exception: replace the FSR FTT field with fsr_flags,
   then trap.  The FSR update must precede the exception so the trap
   handler sees the new trap-type bits.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1572 
/* Emit a "no FPU" trap if the FPU is disabled (system emulation only).
   Returns 1 when the trap was emitted so the caller can abort
   translation of the insn; always 0 in user mode.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1583 
/* Clear the FTT and current-exception (cexc) fields of the FSR.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1588 
1589 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1590                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1591 {
1592     TCGv_i32 dst, src;
1593 
1594     src = gen_load_fpr_F(dc, rs);
1595     dst = gen_dest_fpr_F(dc);
1596 
1597     gen(dst, cpu_env, src);
1598     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1599 
1600     gen_store_fpr_F(dc, rd, dst);
1601 }
1602 
1603 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1604                           void (*gen)(TCGv_i32, TCGv_i32))
1605 {
1606     TCGv_i32 dst, src;
1607 
1608     src = gen_load_fpr_F(dc, rs);
1609     dst = gen_dest_fpr_F(dc);
1610 
1611     gen(dst, src);
1612 
1613     gen_store_fpr_F(dc, rd, dst);
1614 }
1615 
1616 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1617                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1618 {
1619     TCGv_i32 dst, src1, src2;
1620 
1621     src1 = gen_load_fpr_F(dc, rs1);
1622     src2 = gen_load_fpr_F(dc, rs2);
1623     dst = gen_dest_fpr_F(dc);
1624 
1625     gen(dst, cpu_env, src1, src2);
1626     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1627 
1628     gen_store_fpr_F(dc, rd, dst);
1629 }
1630 
1631 #ifdef TARGET_SPARC64
1632 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1633                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1634 {
1635     TCGv_i32 dst, src1, src2;
1636 
1637     src1 = gen_load_fpr_F(dc, rs1);
1638     src2 = gen_load_fpr_F(dc, rs2);
1639     dst = gen_dest_fpr_F(dc);
1640 
1641     gen(dst, src1, src2);
1642 
1643     gen_store_fpr_F(dc, rd, dst);
1644 }
1645 #endif
1646 
1647 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1648                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1649 {
1650     TCGv_i64 dst, src;
1651 
1652     src = gen_load_fpr_D(dc, rs);
1653     dst = gen_dest_fpr_D(dc, rd);
1654 
1655     gen(dst, cpu_env, src);
1656     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1657 
1658     gen_store_fpr_D(dc, rd, dst);
1659 }
1660 
1661 #ifdef TARGET_SPARC64
1662 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1663                           void (*gen)(TCGv_i64, TCGv_i64))
1664 {
1665     TCGv_i64 dst, src;
1666 
1667     src = gen_load_fpr_D(dc, rs);
1668     dst = gen_dest_fpr_D(dc, rd);
1669 
1670     gen(dst, src);
1671 
1672     gen_store_fpr_D(dc, rd, dst);
1673 }
1674 #endif
1675 
1676 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1677                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1678 {
1679     TCGv_i64 dst, src1, src2;
1680 
1681     src1 = gen_load_fpr_D(dc, rs1);
1682     src2 = gen_load_fpr_D(dc, rs2);
1683     dst = gen_dest_fpr_D(dc, rd);
1684 
1685     gen(dst, cpu_env, src1, src2);
1686     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1687 
1688     gen_store_fpr_D(dc, rd, dst);
1689 }
1690 
1691 #ifdef TARGET_SPARC64
1692 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1693                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1694 {
1695     TCGv_i64 dst, src1, src2;
1696 
1697     src1 = gen_load_fpr_D(dc, rs1);
1698     src2 = gen_load_fpr_D(dc, rs2);
1699     dst = gen_dest_fpr_D(dc, rd);
1700 
1701     gen(dst, src1, src2);
1702 
1703     gen_store_fpr_D(dc, rd, dst);
1704 }
1705 
1706 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1707                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1708 {
1709     TCGv_i64 dst, src1, src2;
1710 
1711     src1 = gen_load_fpr_D(dc, rs1);
1712     src2 = gen_load_fpr_D(dc, rs2);
1713     dst = gen_dest_fpr_D(dc, rd);
1714 
1715     gen(dst, cpu_gsr, src1, src2);
1716 
1717     gen_store_fpr_D(dc, rd, dst);
1718 }
1719 
1720 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1721                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1722 {
1723     TCGv_i64 dst, src0, src1, src2;
1724 
1725     src1 = gen_load_fpr_D(dc, rs1);
1726     src2 = gen_load_fpr_D(dc, rs2);
1727     src0 = gen_load_fpr_D(dc, rd);
1728     dst = gen_dest_fpr_D(dc, rd);
1729 
1730     gen(dst, src0, src1, src2);
1731 
1732     gen_store_fpr_D(dc, rd, dst);
1733 }
1734 #endif
1735 
/* Quad-precision unary FP op via the QT0/QT1 staging slots:
   load rs into QT1, run gen (which writes QT0), check IEEE
   exceptions, store QT0 into rd and mark its halves dirty.  */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1747 
1748 #ifdef TARGET_SPARC64
/* Quad-precision unary FP op with no IEEE exception check; same
   QT0/QT1 staging convention as gen_fop_QQ.  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1759 #endif
1760 
/* Quad-precision binary FP op: rs1 -> QT0, rs2 -> QT1, gen writes the
   result back to QT0, then IEEE check and store to rd.  */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1773 
1774 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1775                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1776 {
1777     TCGv_i64 dst;
1778     TCGv_i32 src1, src2;
1779 
1780     src1 = gen_load_fpr_F(dc, rs1);
1781     src2 = gen_load_fpr_F(dc, rs2);
1782     dst = gen_dest_fpr_D(dc, rd);
1783 
1784     gen(dst, cpu_env, src1, src2);
1785     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1786 
1787     gen_store_fpr_D(dc, rd, dst);
1788 }
1789 
1790 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1791                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1792 {
1793     TCGv_i64 src1, src2;
1794 
1795     src1 = gen_load_fpr_D(dc, rs1);
1796     src2 = gen_load_fpr_D(dc, rs2);
1797 
1798     gen(cpu_env, src1, src2);
1799     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1800 
1801     gen_op_store_QT0_fpr(QFPREG(rd));
1802     gen_update_fprs_dirty(dc, QFPREG(rd));
1803 }
1804 
1805 #ifdef TARGET_SPARC64
1806 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1807                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1808 {
1809     TCGv_i64 dst;
1810     TCGv_i32 src;
1811 
1812     src = gen_load_fpr_F(dc, rs);
1813     dst = gen_dest_fpr_D(dc, rd);
1814 
1815     gen(dst, cpu_env, src);
1816     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1817 
1818     gen_store_fpr_D(dc, rd, dst);
1819 }
1820 #endif
1821 
1822 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1823                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1824 {
1825     TCGv_i64 dst;
1826     TCGv_i32 src;
1827 
1828     src = gen_load_fpr_F(dc, rs);
1829     dst = gen_dest_fpr_D(dc, rd);
1830 
1831     gen(dst, cpu_env, src);
1832 
1833     gen_store_fpr_D(dc, rd, dst);
1834 }
1835 
1836 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1837                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1838 {
1839     TCGv_i32 dst;
1840     TCGv_i64 src;
1841 
1842     src = gen_load_fpr_D(dc, rs);
1843     dst = gen_dest_fpr_F(dc);
1844 
1845     gen(dst, cpu_env, src);
1846     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1847 
1848     gen_store_fpr_F(dc, rd, dst);
1849 }
1850 
1851 static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1852                        void (*gen)(TCGv_i32, TCGv_ptr))
1853 {
1854     TCGv_i32 dst;
1855 
1856     gen_op_load_fpr_QT1(QFPREG(rs));
1857     dst = gen_dest_fpr_F(dc);
1858 
1859     gen(dst, cpu_env);
1860     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1861 
1862     gen_store_fpr_F(dc, rd, dst);
1863 }
1864 
1865 static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1866                        void (*gen)(TCGv_i64, TCGv_ptr))
1867 {
1868     TCGv_i64 dst;
1869 
1870     gen_op_load_fpr_QT1(QFPREG(rs));
1871     dst = gen_dest_fpr_D(dc, rd);
1872 
1873     gen(dst, cpu_env);
1874     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1875 
1876     gen_store_fpr_D(dc, rd, dst);
1877 }
1878 
1879 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1880                           void (*gen)(TCGv_ptr, TCGv_i32))
1881 {
1882     TCGv_i32 src;
1883 
1884     src = gen_load_fpr_F(dc, rs);
1885 
1886     gen(cpu_env, src);
1887 
1888     gen_op_store_QT0_fpr(QFPREG(rd));
1889     gen_update_fprs_dirty(dc, QFPREG(rd));
1890 }
1891 
1892 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1893                           void (*gen)(TCGv_ptr, TCGv_i64))
1894 {
1895     TCGv_i64 src;
1896 
1897     src = gen_load_fpr_D(dc, rs);
1898 
1899     gen(cpu_env, src);
1900 
1901     gen_op_store_QT0_fpr(QFPREG(rd));
1902     gen_update_fprs_dirty(dc, QFPREG(rd));
1903 }
1904 
/* SWAP: atomically exchange src with the memory word at addr, placing
   the old memory value in dst.  The access is aligned.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1911 
1912 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1913 {
1914     TCGv m1 = tcg_constant_tl(0xff);
1915     gen_address_mask(dc, addr);
1916     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1917 }
1918 
1919 /* asi moves */
1920 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an alternate-space access should be implemented.  */
typedef enum {
    GET_ASI_HELPER,   /* no direct implementation; call the generic helper */
    GET_ASI_EXCP,     /* an exception was already raised; emit nothing */
    GET_ASI_DIRECT,   /* plain tcg_gen_qemu_ld/st with a chosen mem_idx */
    GET_ASI_DTWINX,   /* 128-bit twin doubleword load/store */
    GET_ASI_BLOCK,    /* 64-byte block load/store */
    GET_ASI_SHORT,    /* 8/16-bit FP load/store (FL8/FL16 ASIs) */
    GET_ASI_BCOPY,    /* sparc32 block copy (ASI_M_BCOPY) */
    GET_ASI_BFILL,    /* sparc32 block fill (ASI_M_BFILL) */
} ASIType;
1931 
/* Decoded ASI: implementation strategy plus the parameters needed
   to emit the access.  Produced by get_asi().  */
typedef struct {
    ASIType type;     /* how to implement the access */
    int asi;          /* raw 8-bit ASI number */
    int mem_idx;      /* softmmu index to use for direct accesses */
    MemOp memop;      /* final memop, including any LE byte-swap */
} DisasASI;
1938 
/*
 * Decode the ASI of a load/store-alternate instruction into a DisasASI:
 * the implementation strategy, the raw ASI number, the softmmu index,
 * and the (possibly byte-swapped) memop.  Privilege violations raise
 * the exception here and return GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    /* Register-form ASI: use the %asi value tracked in dc->asi.  */
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: select the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            /* Secondary ASIs shift the current primary index to its
               secondary counterpart.  */
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: select how the access is implemented.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2151 
/*
 * Generate code for an integer load through an alternate address space
 * (LDA and friends).  MEMOP gives the size/sign/endianness encoded in
 * the instruction; get_asi() classifies the ASI and folds the byte swap
 * for little-endian ASIs into da.memop.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception; nothing to emit.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* Fast path: the ASI maps onto an ordinary mmu-indexed access.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Slow path: defer to the ld_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            /* NOTE(review): the raw MEMOP (not da.memop) is passed here;
               presumably the helper derives endianness from the ASI value
               itself -- confirm against helper_ld_asi.  */
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always produces 64 bits; truncate to TL.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2186 
/*
 * Generate code for an integer store through an alternate address space
 * (STA and friends).  MEMOP gives the size/endianness encoded in the
 * instruction; get_asi() classifies the ASI and folds the byte swap for
 * little-endian ASIs into da.memop.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception; nothing to emit.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* Fast path: an ordinary mmu-indexed store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            /* Emit eight unrolled 4-byte load/store pairs.  */
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Slow path: defer to the st_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            /* NOTE(review): raw MEMOP (not da.memop) is passed; presumably
               the helper derives endianness from the ASI -- confirm.  */
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend from TL.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2262 
2263 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2264                          TCGv addr, int insn)
2265 {
2266     DisasASI da = get_asi(dc, insn, MO_TEUL);
2267 
2268     switch (da.type) {
2269     case GET_ASI_EXCP:
2270         break;
2271     case GET_ASI_DIRECT:
2272         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2273         break;
2274     default:
2275         /* ??? Should be DAE_invalid_asi.  */
2276         gen_exception(dc, TT_DATA_ACCESS);
2277         break;
2278     }
2279 }
2280 
2281 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2282                         int insn, int rd)
2283 {
2284     DisasASI da = get_asi(dc, insn, MO_TEUL);
2285     TCGv oldv;
2286 
2287     switch (da.type) {
2288     case GET_ASI_EXCP:
2289         return;
2290     case GET_ASI_DIRECT:
2291         oldv = tcg_temp_new();
2292         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2293                                   da.mem_idx, da.memop | MO_ALIGN);
2294         gen_store_gpr(dc, rd, oldv);
2295         break;
2296     default:
2297         /* ??? Should be DAE_invalid_asi.  */
2298         gen_exception(dc, TT_DATA_ACCESS);
2299         break;
2300     }
2301 }
2302 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR in the
 * given alternate address space into DST and store 0xff back.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        break;
    case GET_ASI_DIRECT:
        /* Fast path: an ordinary mmu-indexed atomic byte exchange.  */
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper sequence below is not atomic; retry the whole
               instruction under the exclusive (serialized) regime.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            /* Non-atomic emulation: byte load followed by store of 0xff.  */
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2338 #endif
2339 
2340 #ifdef TARGET_SPARC64
/*
 * Generate code for a floating-point load via an alternate address
 * space (ldfa/lddfa/ldqfa).  SIZE is 4, 8 or 16 bytes; RD is the raw
 * FP register number from the instruction.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            /* Single-precision load into an even/odd 32-bit half.  */
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Quad load: two 64-bit halves.  Load the first half into a
               temp so a fault on the second access leaves the register
               pair unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block load: eight consecutive doublewords.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, buffer the first half so a fault
                   on the second helper call leaves the pair intact.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2449 
/*
 * Generate code for a floating-point store via an alternate address
 * space (stfa/stdfa/stqfa).  SIZE is 4, 8 or 16 bytes; RD is the raw
 * FP register number from the instruction.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block store: eight consecutive doublewords.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for ldfa/lddfa/ldqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2532 
/*
 * Generate code for the 64-bit LDDA: load a doubleword (or, for the
 * TWINX ASIs, a 16-byte twin doubleword) into the register pair
 * rd/rd+1.  On sparc64 TCGv is 64-bit, so TCGv_i64 here matches the
 * TCGv returned by gen_dest_gpr().
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        return;

    case GET_ASI_DTWINX:
        /* 128-bit twin load: two full doublewords, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2594 
/*
 * Generate code for the 64-bit STDA: store the register pair rd/rd+1
 * as one doubleword (or, for the TWINX ASIs, as a 16-byte twin store).
 * HI is r[rd]; r[rd+1] is loaded here.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        break;

    case GET_ASI_DTWINX:
        /* 128-bit twin store: two full doublewords, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2650 
2651 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2652                          int insn, int rd)
2653 {
2654     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2655     TCGv oldv;
2656 
2657     switch (da.type) {
2658     case GET_ASI_EXCP:
2659         return;
2660     case GET_ASI_DIRECT:
2661         oldv = tcg_temp_new();
2662         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2663                                   da.mem_idx, da.memop | MO_ALIGN);
2664         gen_store_gpr(dc, rd, oldv);
2665         break;
2666     default:
2667         /* ??? Should be DAE_invalid_asi.  */
2668         gen_exception(dc, TT_DATA_ACCESS);
2669         break;
2670     }
2671 }
2672 
2673 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for the 32-bit LDDA: load one 64-bit value and split it
 * across the register pair rd (high word) / rd+1 (low word).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        return;
    case GET_ASI_DIRECT:
        /* Fast path: one aligned 64-bit load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Slow path: defer to the ld_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result into the even/odd register pair.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2707 
/*
 * Generate code for the 32-bit STDA: store the register pair
 * rd (high word) / rd+1 (low word) as one 64-bit value.  HI is r[rd];
 * r[rd+1] is loaded here.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the pair into one 64-bit value up front; every case below
       stores T64.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already generated the exception.  */
        break;
    case GET_ASI_DIRECT:
        /* Fast path: one aligned 64-bit store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            /* Emit four unrolled 8-byte stores of the same value.  */
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Slow path: defer to the st_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2753 #endif
2754 
2755 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2756 {
2757     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2758     return gen_load_gpr(dc, rs1);
2759 }
2760 
2761 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2762 {
2763     if (IS_IMM) { /* immediate */
2764         target_long simm = GET_FIELDs(insn, 19, 31);
2765         TCGv t = tcg_temp_new();
2766         tcg_gen_movi_tl(t, simm);
2767         return t;
2768     } else {      /* register */
2769         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2770         return gen_load_gpr(dc, rs2);
2771     }
2772 }
2773 
2774 #ifdef TARGET_SPARC64
2775 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2776 {
2777     TCGv_i32 c32, zero, dst, s1, s2;
2778 
2779     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2780        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2781        the later.  */
2782     c32 = tcg_temp_new_i32();
2783     if (cmp->is_bool) {
2784         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2785     } else {
2786         TCGv_i64 c64 = tcg_temp_new_i64();
2787         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2788         tcg_gen_extrl_i64_i32(c32, c64);
2789     }
2790 
2791     s1 = gen_load_fpr_F(dc, rs);
2792     s2 = gen_load_fpr_F(dc, rd);
2793     dst = gen_dest_fpr_F(dc);
2794     zero = tcg_constant_i32(0);
2795 
2796     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2797 
2798     gen_store_fpr_F(dc, rd, dst);
2799 }
2800 
2801 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2802 {
2803     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2804     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2805                         gen_load_fpr_D(dc, rs),
2806                         gen_load_fpr_D(dc, rd));
2807     gen_store_fpr_D(dc, rd, dst);
2808 }
2809 
2810 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2811 {
2812     int qd = QFPREG(rd);
2813     int qs = QFPREG(rs);
2814 
2815     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2816                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2817     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2818                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2819 
2820     gen_update_fprs_dirty(dc, qd);
2821 }
2822 
2823 #ifndef CONFIG_USER_ONLY
/*
 * Emit code computing a pointer to the trap state for the current trap
 * level:  r_tsptr = &env->ts[env->tl & MAXTL_MASK].
 */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2845 #endif
2846 
/*
 * Emit code for the VIS edge8/16/32{l}{cc} instructions: compute the
 * edge mask for a partial store of WIDTH-bit elements between addresses
 * S1 and S2, optionally (CC) setting the condition codes as for
 * subcc s1, s2.  LEFT selects the left-edge variants.  Note that S1 and
 * S2 are clobbered (masked) in the process.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* edgeNcc: update icc/xcc as a subtraction would.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        /* Eight 8-bit table entries packed into a 64-bit constant.  */
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        /* Four 4-bit table entries packed into a 16-bit constant.  */
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        /* Two 2-bit table entries packed into a 4-bit constant.  */
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table >> ((input & imask) << shift), masked below.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the doubleword-aligned addresses, honoring the 32-bit
       address mask when AM is in effect.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, lo1, s1, s2);
    tcg_gen_neg_tl(lo1, lo1);
    tcg_gen_or_tl(lo2, lo2, lo1);
    tcg_gen_and_tl(dst, dst, lo2);
}
2942 
2943 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2944 {
2945     TCGv tmp = tcg_temp_new();
2946 
2947     tcg_gen_add_tl(tmp, s1, s2);
2948     tcg_gen_andi_tl(dst, tmp, -8);
2949     if (left) {
2950         tcg_gen_neg_tl(tmp, tmp);
2951     }
2952     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2953 }
2954 
2955 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2956 {
2957     TCGv t1, t2, shift;
2958 
2959     t1 = tcg_temp_new();
2960     t2 = tcg_temp_new();
2961     shift = tcg_temp_new();
2962 
2963     tcg_gen_andi_tl(shift, gsr, 7);
2964     tcg_gen_shli_tl(shift, shift, 3);
2965     tcg_gen_shl_tl(t1, s1, shift);
2966 
2967     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2968        shift of (up to 63) followed by a constant shift of 1.  */
2969     tcg_gen_xori_tl(shift, shift, 63);
2970     tcg_gen_shr_tl(t2, s2, shift);
2971     tcg_gen_shri_tl(t2, t2, 1);
2972 
2973     tcg_gen_or_tl(dst, t1, t2);
2974 }
2975 #endif
2976 
/* Jump to disas_sparc_insn's illegal_insn label unless the CPU model
   advertises integer-unit FEATURE.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise, jump to the nfpu_insn label unless the CPU model
   advertises FPU FEATURE.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2983 
2984 /* before an instruction, dc->pc must be static */
2985 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2986 {
2987     unsigned int opc, rs1, rs2, rd;
2988     TCGv cpu_src1, cpu_src2;
2989     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2990     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2991     target_long simm;
2992 
2993     opc = GET_FIELD(insn, 0, 1);
2994     rd = GET_FIELD(insn, 2, 6);
2995 
2996     switch (opc) {
2997     case 0:                     /* branches/sethi */
2998         {
2999             unsigned int xop = GET_FIELD(insn, 7, 9);
3000             int32_t target;
3001             switch (xop) {
3002 #ifdef TARGET_SPARC64
3003             case 0x1:           /* V9 BPcc */
3004                 {
3005                     int cc;
3006 
3007                     target = GET_FIELD_SP(insn, 0, 18);
3008                     target = sign_extend(target, 19);
3009                     target <<= 2;
3010                     cc = GET_FIELD_SP(insn, 20, 21);
3011                     if (cc == 0)
3012                         do_branch(dc, target, insn, 0);
3013                     else if (cc == 2)
3014                         do_branch(dc, target, insn, 1);
3015                     else
3016                         goto illegal_insn;
3017                     goto jmp_insn;
3018                 }
3019             case 0x3:           /* V9 BPr */
3020                 {
3021                     target = GET_FIELD_SP(insn, 0, 13) |
3022                         (GET_FIELD_SP(insn, 20, 21) << 14);
3023                     target = sign_extend(target, 16);
3024                     target <<= 2;
3025                     cpu_src1 = get_src1(dc, insn);
3026                     do_branch_reg(dc, target, insn, cpu_src1);
3027                     goto jmp_insn;
3028                 }
3029             case 0x5:           /* V9 FBPcc */
3030                 {
3031                     int cc = GET_FIELD_SP(insn, 20, 21);
3032                     if (gen_trap_ifnofpu(dc)) {
3033                         goto jmp_insn;
3034                     }
3035                     target = GET_FIELD_SP(insn, 0, 18);
3036                     target = sign_extend(target, 19);
3037                     target <<= 2;
3038                     do_fbranch(dc, target, insn, cc);
3039                     goto jmp_insn;
3040                 }
3041 #else
3042             case 0x7:           /* CBN+x */
3043                 {
3044                     goto ncp_insn;
3045                 }
3046 #endif
3047             case 0x2:           /* BN+x */
3048                 {
3049                     target = GET_FIELD(insn, 10, 31);
3050                     target = sign_extend(target, 22);
3051                     target <<= 2;
3052                     do_branch(dc, target, insn, 0);
3053                     goto jmp_insn;
3054                 }
3055             case 0x6:           /* FBN+x */
3056                 {
3057                     if (gen_trap_ifnofpu(dc)) {
3058                         goto jmp_insn;
3059                     }
3060                     target = GET_FIELD(insn, 10, 31);
3061                     target = sign_extend(target, 22);
3062                     target <<= 2;
3063                     do_fbranch(dc, target, insn, 0);
3064                     goto jmp_insn;
3065                 }
3066             case 0x4:           /* SETHI */
3067                 /* Special-case %g0 because that's the canonical nop.  */
3068                 if (rd) {
3069                     uint32_t value = GET_FIELD(insn, 10, 31);
3070                     TCGv t = gen_dest_gpr(dc, rd);
3071                     tcg_gen_movi_tl(t, value << 10);
3072                     gen_store_gpr(dc, rd, t);
3073                 }
3074                 break;
3075             case 0x0:           /* UNIMPL */
3076             default:
3077                 goto illegal_insn;
3078             }
3079             break;
3080         }
3081         break;
3082     case 1:                     /*CALL*/
3083         {
3084             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3085             TCGv o7 = gen_dest_gpr(dc, 15);
3086 
3087             tcg_gen_movi_tl(o7, dc->pc);
3088             gen_store_gpr(dc, 15, o7);
3089             target += dc->pc;
3090             gen_mov_pc_npc(dc);
3091 #ifdef TARGET_SPARC64
3092             if (unlikely(AM_CHECK(dc))) {
3093                 target &= 0xffffffffULL;
3094             }
3095 #endif
3096             dc->npc = target;
3097         }
3098         goto jmp_insn;
3099     case 2:                     /* FPU & Logical Operations */
3100         {
3101             unsigned int xop = GET_FIELD(insn, 7, 12);
3102             TCGv cpu_dst = tcg_temp_new();
3103             TCGv cpu_tmp0;
3104 
3105             if (xop == 0x3a) {  /* generate trap */
3106                 int cond = GET_FIELD(insn, 3, 6);
3107                 TCGv_i32 trap;
3108                 TCGLabel *l1 = NULL;
3109                 int mask;
3110 
3111                 if (cond == 0) {
3112                     /* Trap never.  */
3113                     break;
3114                 }
3115 
3116                 save_state(dc);
3117 
3118                 if (cond != 8) {
3119                     /* Conditional trap.  */
3120                     DisasCompare cmp;
3121 #ifdef TARGET_SPARC64
3122                     /* V9 icc/xcc */
3123                     int cc = GET_FIELD_SP(insn, 11, 12);
3124                     if (cc == 0) {
3125                         gen_compare(&cmp, 0, cond, dc);
3126                     } else if (cc == 2) {
3127                         gen_compare(&cmp, 1, cond, dc);
3128                     } else {
3129                         goto illegal_insn;
3130                     }
3131 #else
3132                     gen_compare(&cmp, 0, cond, dc);
3133 #endif
3134                     l1 = gen_new_label();
3135                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3136                                       cmp.c1, cmp.c2, l1);
3137                 }
3138 
3139                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3140                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3141 
3142                 /* Don't use the normal temporaries, as they may well have
3143                    gone out of scope with the branch above.  While we're
3144                    doing that we might as well pre-truncate to 32-bit.  */
3145                 trap = tcg_temp_new_i32();
3146 
3147                 rs1 = GET_FIELD_SP(insn, 14, 18);
3148                 if (IS_IMM) {
3149                     rs2 = GET_FIELD_SP(insn, 0, 7);
3150                     if (rs1 == 0) {
3151                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3152                         /* Signal that the trap value is fully constant.  */
3153                         mask = 0;
3154                     } else {
3155                         TCGv t1 = gen_load_gpr(dc, rs1);
3156                         tcg_gen_trunc_tl_i32(trap, t1);
3157                         tcg_gen_addi_i32(trap, trap, rs2);
3158                     }
3159                 } else {
3160                     TCGv t1, t2;
3161                     rs2 = GET_FIELD_SP(insn, 0, 4);
3162                     t1 = gen_load_gpr(dc, rs1);
3163                     t2 = gen_load_gpr(dc, rs2);
3164                     tcg_gen_add_tl(t1, t1, t2);
3165                     tcg_gen_trunc_tl_i32(trap, t1);
3166                 }
3167                 if (mask != 0) {
3168                     tcg_gen_andi_i32(trap, trap, mask);
3169                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3170                 }
3171 
3172                 gen_helper_raise_exception(cpu_env, trap);
3173 
3174                 if (cond == 8) {
3175                     /* An unconditional trap ends the TB.  */
3176                     dc->base.is_jmp = DISAS_NORETURN;
3177                     goto jmp_insn;
3178                 } else {
3179                     /* A conditional trap falls through to the next insn.  */
3180                     gen_set_label(l1);
3181                     break;
3182                 }
3183             } else if (xop == 0x28) {
3184                 rs1 = GET_FIELD(insn, 13, 17);
3185                 switch(rs1) {
3186                 case 0: /* rdy */
3187 #ifndef TARGET_SPARC64
3188                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3189                                        manual, rdy on the microSPARC
3190                                        II */
3191                 case 0x0f:          /* stbar in the SPARCv8 manual,
3192                                        rdy on the microSPARC II */
3193                 case 0x10 ... 0x1f: /* implementation-dependent in the
3194                                        SPARCv8 manual, rdy on the
3195                                        microSPARC II */
3196                     /* Read Asr17 */
3197                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3198                         TCGv t = gen_dest_gpr(dc, rd);
3199                         /* Read Asr17 for a Leon3 monoprocessor */
3200                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3201                         gen_store_gpr(dc, rd, t);
3202                         break;
3203                     }
3204 #endif
3205                     gen_store_gpr(dc, rd, cpu_y);
3206                     break;
3207 #ifdef TARGET_SPARC64
3208                 case 0x2: /* V9 rdccr */
3209                     update_psr(dc);
3210                     gen_helper_rdccr(cpu_dst, cpu_env);
3211                     gen_store_gpr(dc, rd, cpu_dst);
3212                     break;
3213                 case 0x3: /* V9 rdasi */
3214                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3215                     gen_store_gpr(dc, rd, cpu_dst);
3216                     break;
3217                 case 0x4: /* V9 rdtick */
3218                     {
3219                         TCGv_ptr r_tickptr;
3220                         TCGv_i32 r_const;
3221 
3222                         r_tickptr = tcg_temp_new_ptr();
3223                         r_const = tcg_constant_i32(dc->mem_idx);
3224                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3225                                        offsetof(CPUSPARCState, tick));
3226                         if (translator_io_start(&dc->base)) {
3227                             dc->base.is_jmp = DISAS_EXIT;
3228                         }
3229                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3230                                                   r_const);
3231                         gen_store_gpr(dc, rd, cpu_dst);
3232                     }
3233                     break;
3234                 case 0x5: /* V9 rdpc */
3235                     {
3236                         TCGv t = gen_dest_gpr(dc, rd);
3237                         if (unlikely(AM_CHECK(dc))) {
3238                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3239                         } else {
3240                             tcg_gen_movi_tl(t, dc->pc);
3241                         }
3242                         gen_store_gpr(dc, rd, t);
3243                     }
3244                     break;
3245                 case 0x6: /* V9 rdfprs */
3246                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3247                     gen_store_gpr(dc, rd, cpu_dst);
3248                     break;
3249                 case 0xf: /* V9 membar */
3250                     break; /* no effect */
3251                 case 0x13: /* Graphics Status */
3252                     if (gen_trap_ifnofpu(dc)) {
3253                         goto jmp_insn;
3254                     }
3255                     gen_store_gpr(dc, rd, cpu_gsr);
3256                     break;
3257                 case 0x16: /* Softint */
3258                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3259                                      offsetof(CPUSPARCState, softint));
3260                     gen_store_gpr(dc, rd, cpu_dst);
3261                     break;
3262                 case 0x17: /* Tick compare */
3263                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3264                     break;
3265                 case 0x18: /* System tick */
3266                     {
3267                         TCGv_ptr r_tickptr;
3268                         TCGv_i32 r_const;
3269 
3270                         r_tickptr = tcg_temp_new_ptr();
3271                         r_const = tcg_constant_i32(dc->mem_idx);
3272                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3273                                        offsetof(CPUSPARCState, stick));
3274                         if (translator_io_start(&dc->base)) {
3275                             dc->base.is_jmp = DISAS_EXIT;
3276                         }
3277                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3278                                                   r_const);
3279                         gen_store_gpr(dc, rd, cpu_dst);
3280                     }
3281                     break;
3282                 case 0x19: /* System tick compare */
3283                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3284                     break;
3285                 case 0x1a: /* UltraSPARC-T1 Strand status */
3286                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3287                      * this ASR as impl. dep
3288                      */
3289                     CHECK_IU_FEATURE(dc, HYPV);
3290                     {
3291                         TCGv t = gen_dest_gpr(dc, rd);
3292                         tcg_gen_movi_tl(t, 1UL);
3293                         gen_store_gpr(dc, rd, t);
3294                     }
3295                     break;
3296                 case 0x10: /* Performance Control */
3297                 case 0x11: /* Performance Instrumentation Counter */
3298                 case 0x12: /* Dispatch Control */
3299                 case 0x14: /* Softint set, WO */
3300                 case 0x15: /* Softint clear, WO */
3301 #endif
3302                 default:
3303                     goto illegal_insn;
3304                 }
3305 #if !defined(CONFIG_USER_ONLY)
3306             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3307 #ifndef TARGET_SPARC64
3308                 if (!supervisor(dc)) {
3309                     goto priv_insn;
3310                 }
3311                 update_psr(dc);
3312                 gen_helper_rdpsr(cpu_dst, cpu_env);
3313 #else
3314                 CHECK_IU_FEATURE(dc, HYPV);
3315                 if (!hypervisor(dc))
3316                     goto priv_insn;
3317                 rs1 = GET_FIELD(insn, 13, 17);
3318                 switch (rs1) {
3319                 case 0: // hpstate
3320                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3321                                    offsetof(CPUSPARCState, hpstate));
3322                     break;
3323                 case 1: // htstate
3324                     // gen_op_rdhtstate();
3325                     break;
3326                 case 3: // hintp
3327                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3328                     break;
3329                 case 5: // htba
3330                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3331                     break;
3332                 case 6: // hver
3333                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3334                     break;
3335                 case 31: // hstick_cmpr
3336                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3337                     break;
3338                 default:
3339                     goto illegal_insn;
3340                 }
3341 #endif
3342                 gen_store_gpr(dc, rd, cpu_dst);
3343                 break;
3344             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3345                 if (!supervisor(dc)) {
3346                     goto priv_insn;
3347                 }
3348                 cpu_tmp0 = tcg_temp_new();
3349 #ifdef TARGET_SPARC64
3350                 rs1 = GET_FIELD(insn, 13, 17);
3351                 switch (rs1) {
3352                 case 0: // tpc
3353                     {
3354                         TCGv_ptr r_tsptr;
3355 
3356                         r_tsptr = tcg_temp_new_ptr();
3357                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3358                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3359                                       offsetof(trap_state, tpc));
3360                     }
3361                     break;
3362                 case 1: // tnpc
3363                     {
3364                         TCGv_ptr r_tsptr;
3365 
3366                         r_tsptr = tcg_temp_new_ptr();
3367                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3368                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3369                                       offsetof(trap_state, tnpc));
3370                     }
3371                     break;
3372                 case 2: // tstate
3373                     {
3374                         TCGv_ptr r_tsptr;
3375 
3376                         r_tsptr = tcg_temp_new_ptr();
3377                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3378                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3379                                       offsetof(trap_state, tstate));
3380                     }
3381                     break;
3382                 case 3: // tt
3383                     {
3384                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3385 
3386                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3387                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3388                                          offsetof(trap_state, tt));
3389                     }
3390                     break;
3391                 case 4: // tick
3392                     {
3393                         TCGv_ptr r_tickptr;
3394                         TCGv_i32 r_const;
3395 
3396                         r_tickptr = tcg_temp_new_ptr();
3397                         r_const = tcg_constant_i32(dc->mem_idx);
3398                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3399                                        offsetof(CPUSPARCState, tick));
3400                         if (translator_io_start(&dc->base)) {
3401                             dc->base.is_jmp = DISAS_EXIT;
3402                         }
3403                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3404                                                   r_tickptr, r_const);
3405                     }
3406                     break;
3407                 case 5: // tba
3408                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3409                     break;
3410                 case 6: // pstate
3411                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3412                                      offsetof(CPUSPARCState, pstate));
3413                     break;
3414                 case 7: // tl
3415                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3416                                      offsetof(CPUSPARCState, tl));
3417                     break;
3418                 case 8: // pil
3419                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3420                                      offsetof(CPUSPARCState, psrpil));
3421                     break;
3422                 case 9: // cwp
3423                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3424                     break;
3425                 case 10: // cansave
3426                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3427                                      offsetof(CPUSPARCState, cansave));
3428                     break;
3429                 case 11: // canrestore
3430                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3431                                      offsetof(CPUSPARCState, canrestore));
3432                     break;
3433                 case 12: // cleanwin
3434                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3435                                      offsetof(CPUSPARCState, cleanwin));
3436                     break;
3437                 case 13: // otherwin
3438                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3439                                      offsetof(CPUSPARCState, otherwin));
3440                     break;
3441                 case 14: // wstate
3442                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3443                                      offsetof(CPUSPARCState, wstate));
3444                     break;
3445                 case 16: // UA2005 gl
3446                     CHECK_IU_FEATURE(dc, GL);
3447                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3448                                      offsetof(CPUSPARCState, gl));
3449                     break;
3450                 case 26: // UA2005 strand status
3451                     CHECK_IU_FEATURE(dc, HYPV);
3452                     if (!hypervisor(dc))
3453                         goto priv_insn;
3454                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3455                     break;
3456                 case 31: // ver
3457                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3458                     break;
3459                 case 15: // fq
3460                 default:
3461                     goto illegal_insn;
3462                 }
3463 #else
3464                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3465 #endif
3466                 gen_store_gpr(dc, rd, cpu_tmp0);
3467                 break;
3468 #endif
3469 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3470             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3471 #ifdef TARGET_SPARC64
3472                 gen_helper_flushw(cpu_env);
3473 #else
3474                 if (!supervisor(dc))
3475                     goto priv_insn;
3476                 gen_store_gpr(dc, rd, cpu_tbr);
3477 #endif
3478                 break;
3479 #endif
3480             } else if (xop == 0x34) {   /* FPU Operations */
3481                 if (gen_trap_ifnofpu(dc)) {
3482                     goto jmp_insn;
3483                 }
3484                 gen_op_clear_ieee_excp_and_FTT();
3485                 rs1 = GET_FIELD(insn, 13, 17);
3486                 rs2 = GET_FIELD(insn, 27, 31);
3487                 xop = GET_FIELD(insn, 18, 26);
3488 
3489                 switch (xop) {
3490                 case 0x1: /* fmovs */
3491                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3492                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3493                     break;
3494                 case 0x5: /* fnegs */
3495                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3496                     break;
3497                 case 0x9: /* fabss */
3498                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3499                     break;
3500                 case 0x29: /* fsqrts */
3501                     CHECK_FPU_FEATURE(dc, FSQRT);
3502                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3503                     break;
3504                 case 0x2a: /* fsqrtd */
3505                     CHECK_FPU_FEATURE(dc, FSQRT);
3506                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3507                     break;
3508                 case 0x2b: /* fsqrtq */
3509                     CHECK_FPU_FEATURE(dc, FLOAT128);
3510                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3511                     break;
3512                 case 0x41: /* fadds */
3513                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3514                     break;
3515                 case 0x42: /* faddd */
3516                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3517                     break;
3518                 case 0x43: /* faddq */
3519                     CHECK_FPU_FEATURE(dc, FLOAT128);
3520                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3521                     break;
3522                 case 0x45: /* fsubs */
3523                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3524                     break;
3525                 case 0x46: /* fsubd */
3526                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3527                     break;
3528                 case 0x47: /* fsubq */
3529                     CHECK_FPU_FEATURE(dc, FLOAT128);
3530                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3531                     break;
3532                 case 0x49: /* fmuls */
3533                     CHECK_FPU_FEATURE(dc, FMUL);
3534                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3535                     break;
3536                 case 0x4a: /* fmuld */
3537                     CHECK_FPU_FEATURE(dc, FMUL);
3538                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3539                     break;
3540                 case 0x4b: /* fmulq */
3541                     CHECK_FPU_FEATURE(dc, FLOAT128);
3542                     CHECK_FPU_FEATURE(dc, FMUL);
3543                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3544                     break;
3545                 case 0x4d: /* fdivs */
3546                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3547                     break;
3548                 case 0x4e: /* fdivd */
3549                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3550                     break;
3551                 case 0x4f: /* fdivq */
3552                     CHECK_FPU_FEATURE(dc, FLOAT128);
3553                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3554                     break;
3555                 case 0x69: /* fsmuld */
3556                     CHECK_FPU_FEATURE(dc, FSMULD);
3557                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3558                     break;
3559                 case 0x6e: /* fdmulq */
3560                     CHECK_FPU_FEATURE(dc, FLOAT128);
3561                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3562                     break;
3563                 case 0xc4: /* fitos */
3564                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3565                     break;
3566                 case 0xc6: /* fdtos */
3567                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3568                     break;
3569                 case 0xc7: /* fqtos */
3570                     CHECK_FPU_FEATURE(dc, FLOAT128);
3571                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3572                     break;
3573                 case 0xc8: /* fitod */
3574                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3575                     break;
3576                 case 0xc9: /* fstod */
3577                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3578                     break;
3579                 case 0xcb: /* fqtod */
3580                     CHECK_FPU_FEATURE(dc, FLOAT128);
3581                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3582                     break;
3583                 case 0xcc: /* fitoq */
3584                     CHECK_FPU_FEATURE(dc, FLOAT128);
3585                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3586                     break;
3587                 case 0xcd: /* fstoq */
3588                     CHECK_FPU_FEATURE(dc, FLOAT128);
3589                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3590                     break;
3591                 case 0xce: /* fdtoq */
3592                     CHECK_FPU_FEATURE(dc, FLOAT128);
3593                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3594                     break;
3595                 case 0xd1: /* fstoi */
3596                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3597                     break;
3598                 case 0xd2: /* fdtoi */
3599                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3600                     break;
3601                 case 0xd3: /* fqtoi */
3602                     CHECK_FPU_FEATURE(dc, FLOAT128);
3603                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3604                     break;
3605 #ifdef TARGET_SPARC64
3606                 case 0x2: /* V9 fmovd */
3607                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3608                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3609                     break;
3610                 case 0x3: /* V9 fmovq */
3611                     CHECK_FPU_FEATURE(dc, FLOAT128);
3612                     gen_move_Q(dc, rd, rs2);
3613                     break;
3614                 case 0x6: /* V9 fnegd */
3615                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3616                     break;
3617                 case 0x7: /* V9 fnegq */
3618                     CHECK_FPU_FEATURE(dc, FLOAT128);
3619                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3620                     break;
3621                 case 0xa: /* V9 fabsd */
3622                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3623                     break;
3624                 case 0xb: /* V9 fabsq */
3625                     CHECK_FPU_FEATURE(dc, FLOAT128);
3626                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3627                     break;
3628                 case 0x81: /* V9 fstox */
3629                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3630                     break;
3631                 case 0x82: /* V9 fdtox */
3632                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3633                     break;
3634                 case 0x83: /* V9 fqtox */
3635                     CHECK_FPU_FEATURE(dc, FLOAT128);
3636                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3637                     break;
3638                 case 0x84: /* V9 fxtos */
3639                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3640                     break;
3641                 case 0x88: /* V9 fxtod */
3642                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3643                     break;
3644                 case 0x8c: /* V9 fxtoq */
3645                     CHECK_FPU_FEATURE(dc, FLOAT128);
3646                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3647                     break;
3648 #endif
3649                 default:
3650                     goto illegal_insn;
3651                 }
3652             } else if (xop == 0x35) {   /* FPU Operations */
3653 #ifdef TARGET_SPARC64
3654                 int cond;
3655 #endif
3656                 if (gen_trap_ifnofpu(dc)) {
3657                     goto jmp_insn;
3658                 }
3659                 gen_op_clear_ieee_excp_and_FTT();
3660                 rs1 = GET_FIELD(insn, 13, 17);
3661                 rs2 = GET_FIELD(insn, 27, 31);
3662                 xop = GET_FIELD(insn, 18, 26);
3663 
3664 #ifdef TARGET_SPARC64
3665 #define FMOVR(sz)                                                  \
3666                 do {                                               \
3667                     DisasCompare cmp;                              \
3668                     cond = GET_FIELD_SP(insn, 10, 12);             \
3669                     cpu_src1 = get_src1(dc, insn);                 \
3670                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3671                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3672                 } while (0)
3673 
3674                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3675                     FMOVR(s);
3676                     break;
3677                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3678                     FMOVR(d);
3679                     break;
3680                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3681                     CHECK_FPU_FEATURE(dc, FLOAT128);
3682                     FMOVR(q);
3683                     break;
3684                 }
3685 #undef FMOVR
3686 #endif
3687                 switch (xop) {
3688 #ifdef TARGET_SPARC64
3689 #define FMOVCC(fcc, sz)                                                 \
3690                     do {                                                \
3691                         DisasCompare cmp;                               \
3692                         cond = GET_FIELD_SP(insn, 14, 17);              \
3693                         gen_fcompare(&cmp, fcc, cond);                  \
3694                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3695                     } while (0)
3696 
3697                     case 0x001: /* V9 fmovscc %fcc0 */
3698                         FMOVCC(0, s);
3699                         break;
3700                     case 0x002: /* V9 fmovdcc %fcc0 */
3701                         FMOVCC(0, d);
3702                         break;
3703                     case 0x003: /* V9 fmovqcc %fcc0 */
3704                         CHECK_FPU_FEATURE(dc, FLOAT128);
3705                         FMOVCC(0, q);
3706                         break;
3707                     case 0x041: /* V9 fmovscc %fcc1 */
3708                         FMOVCC(1, s);
3709                         break;
3710                     case 0x042: /* V9 fmovdcc %fcc1 */
3711                         FMOVCC(1, d);
3712                         break;
3713                     case 0x043: /* V9 fmovqcc %fcc1 */
3714                         CHECK_FPU_FEATURE(dc, FLOAT128);
3715                         FMOVCC(1, q);
3716                         break;
3717                     case 0x081: /* V9 fmovscc %fcc2 */
3718                         FMOVCC(2, s);
3719                         break;
3720                     case 0x082: /* V9 fmovdcc %fcc2 */
3721                         FMOVCC(2, d);
3722                         break;
3723                     case 0x083: /* V9 fmovqcc %fcc2 */
3724                         CHECK_FPU_FEATURE(dc, FLOAT128);
3725                         FMOVCC(2, q);
3726                         break;
3727                     case 0x0c1: /* V9 fmovscc %fcc3 */
3728                         FMOVCC(3, s);
3729                         break;
3730                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3731                         FMOVCC(3, d);
3732                         break;
3733                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3734                         CHECK_FPU_FEATURE(dc, FLOAT128);
3735                         FMOVCC(3, q);
3736                         break;
3737 #undef FMOVCC
3738 #define FMOVCC(xcc, sz)                                                 \
3739                     do {                                                \
3740                         DisasCompare cmp;                               \
3741                         cond = GET_FIELD_SP(insn, 14, 17);              \
3742                         gen_compare(&cmp, xcc, cond, dc);               \
3743                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3744                     } while (0)
3745 
3746                     case 0x101: /* V9 fmovscc %icc */
3747                         FMOVCC(0, s);
3748                         break;
3749                     case 0x102: /* V9 fmovdcc %icc */
3750                         FMOVCC(0, d);
3751                         break;
3752                     case 0x103: /* V9 fmovqcc %icc */
3753                         CHECK_FPU_FEATURE(dc, FLOAT128);
3754                         FMOVCC(0, q);
3755                         break;
3756                     case 0x181: /* V9 fmovscc %xcc */
3757                         FMOVCC(1, s);
3758                         break;
3759                     case 0x182: /* V9 fmovdcc %xcc */
3760                         FMOVCC(1, d);
3761                         break;
3762                     case 0x183: /* V9 fmovqcc %xcc */
3763                         CHECK_FPU_FEATURE(dc, FLOAT128);
3764                         FMOVCC(1, q);
3765                         break;
3766 #undef FMOVCC
3767 #endif
                    case 0x51: /* fcmps, V9 %fcc */
                        /* rd & 3 picks the %fcc condition field that
                           receives the comparison result.  */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        /* 128-bit operands are staged through the QT0/QT1
                           env slots rather than passed as TCG temps.  */
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        /* fcmpe* are the "exception on unordered" variants;
                           the difference from fcmp* is presumably inside the
                           gen_op_fcmpe* helpers -- NOTE(review): confirm.  */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
3800                     default:
3801                         goto illegal_insn;
3802                 }
            } else if (xop == 0x2) {
                /* or: also the canonical encoding for the mov/clr
                   synthetic instructions, hence the %g0 shortcuts below.  */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
3840 #ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    /* insn bit 12 (the X field) selects the 64-bit form:
                       sllx takes a 6-bit shift count, sll only 5 bits.  */
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    /* Same X-field distinction for a dynamic count: mask
                       to 6 bits for sllx, 5 bits for sll.  */
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
3862             } else if (xop == 0x26) { /* srl, V9 srlx */
3863                 cpu_src1 = get_src1(dc, insn);
3864                 if (IS_IMM) {   /* immediate */
3865                     simm = GET_FIELDs(insn, 20, 31);
3866                     if (insn & (1 << 12)) {
3867                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3868                     } else {
3869                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3870                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3871                     }
3872                 } else {                /* register */
3873                     rs2 = GET_FIELD(insn, 27, 31);
3874                     cpu_src2 = gen_load_gpr(dc, rs2);
3875                     cpu_tmp0 = tcg_temp_new();
3876                     if (insn & (1 << 12)) {
3877                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3878                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3879                     } else {
3880                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3881                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3882                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3883                     }
3884                 }
3885                 gen_store_gpr(dc, rd, cpu_dst);
3886             } else if (xop == 0x27) { /* sra, V9 srax */
3887                 cpu_src1 = get_src1(dc, insn);
3888                 if (IS_IMM) {   /* immediate */
3889                     simm = GET_FIELDs(insn, 20, 31);
3890                     if (insn & (1 << 12)) {
3891                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3892                     } else {
3893                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3894                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3895                     }
3896                 } else {                /* register */
3897                     rs2 = GET_FIELD(insn, 27, 31);
3898                     cpu_src2 = gen_load_gpr(dc, rs2);
3899                     cpu_tmp0 = tcg_temp_new();
3900                     if (insn & (1 << 12)) {
3901                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3902                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3903                     } else {
3904                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3905                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3906                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3907                     }
3908                 }
3909                 gen_store_gpr(dc, rd, cpu_dst);
3910 #endif
3911             } else if (xop < 0x36) {
3912                 if (xop < 0x20) {
3913                     cpu_src1 = get_src1(dc, insn);
3914                     cpu_src2 = get_src2(dc, insn);
3915                     switch (xop & ~0x10) {
3916                     case 0x0: /* add */
3917                         if (xop & 0x10) {
3918                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3919                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3920                             dc->cc_op = CC_OP_ADD;
3921                         } else {
3922                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3923                         }
3924                         break;
3925                     case 0x1: /* and */
3926                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3927                         if (xop & 0x10) {
3928                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3929                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3930                             dc->cc_op = CC_OP_LOGIC;
3931                         }
3932                         break;
3933                     case 0x2: /* or */
3934                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3935                         if (xop & 0x10) {
3936                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3937                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3938                             dc->cc_op = CC_OP_LOGIC;
3939                         }
3940                         break;
3941                     case 0x3: /* xor */
3942                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3943                         if (xop & 0x10) {
3944                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3945                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3946                             dc->cc_op = CC_OP_LOGIC;
3947                         }
3948                         break;
3949                     case 0x4: /* sub */
3950                         if (xop & 0x10) {
3951                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3952                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3953                             dc->cc_op = CC_OP_SUB;
3954                         } else {
3955                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3956                         }
3957                         break;
3958                     case 0x5: /* andn */
3959                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3960                         if (xop & 0x10) {
3961                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3962                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3963                             dc->cc_op = CC_OP_LOGIC;
3964                         }
3965                         break;
3966                     case 0x6: /* orn */
3967                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3968                         if (xop & 0x10) {
3969                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3970                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3971                             dc->cc_op = CC_OP_LOGIC;
3972                         }
3973                         break;
3974                     case 0x7: /* xorn */
3975                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3976                         if (xop & 0x10) {
3977                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3978                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3979                             dc->cc_op = CC_OP_LOGIC;
3980                         }
3981                         break;
3982                     case 0x8: /* addx, V9 addc */
3983                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3984                                         (xop & 0x10));
3985                         break;
3986 #ifdef TARGET_SPARC64
3987                     case 0x9: /* V9 mulx */
3988                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3989                         break;
3990 #endif
3991                     case 0xa: /* umul */
3992                         CHECK_IU_FEATURE(dc, MUL);
3993                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3994                         if (xop & 0x10) {
3995                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3996                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3997                             dc->cc_op = CC_OP_LOGIC;
3998                         }
3999                         break;
4000                     case 0xb: /* smul */
4001                         CHECK_IU_FEATURE(dc, MUL);
4002                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4003                         if (xop & 0x10) {
4004                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4005                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4006                             dc->cc_op = CC_OP_LOGIC;
4007                         }
4008                         break;
4009                     case 0xc: /* subx, V9 subc */
4010                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4011                                         (xop & 0x10));
4012                         break;
4013 #ifdef TARGET_SPARC64
4014                     case 0xd: /* V9 udivx */
4015                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4016                         break;
4017 #endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /* udivcc: unlike the add/sub cases above there
                               is no tcg_gen_movi_i32(cpu_cc_op, ...) here;
                               presumably the _cc helper updates env->cc_op
                               itself -- NOTE(review): confirm in the sparc
                               op helpers.  */
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /* sdivcc: same cc_op handling note as udivcc.  */
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
4040                     default:
4041                         goto illegal_insn;
4042                     }
4043                     gen_store_gpr(dc, rd, cpu_dst);
4044                 } else {
4045                     cpu_src1 = get_src1(dc, insn);
4046                     cpu_src2 = get_src2(dc, insn);
4047                     switch (xop) {
4048                     case 0x20: /* taddcc */
4049                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4050                         gen_store_gpr(dc, rd, cpu_dst);
4051                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4052                         dc->cc_op = CC_OP_TADD;
4053                         break;
4054                     case 0x21: /* tsubcc */
4055                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4056                         gen_store_gpr(dc, rd, cpu_dst);
4057                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4058                         dc->cc_op = CC_OP_TSUB;
4059                         break;
4060                     case 0x22: /* taddcctv */
4061                         gen_helper_taddcctv(cpu_dst, cpu_env,
4062                                             cpu_src1, cpu_src2);
4063                         gen_store_gpr(dc, rd, cpu_dst);
4064                         dc->cc_op = CC_OP_TADDTV;
4065                         break;
4066                     case 0x23: /* tsubcctv */
4067                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4068                                             cpu_src1, cpu_src2);
4069                         gen_store_gpr(dc, rd, cpu_dst);
4070                         dc->cc_op = CC_OP_TSUBTV;
4071                         break;
4072                     case 0x24: /* mulscc */
4073                         update_psr(dc);
4074                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4075                         gen_store_gpr(dc, rd, cpu_dst);
4076                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4077                         dc->cc_op = CC_OP_ADD;
4078                         break;
4079 #ifndef TARGET_SPARC64
4080                     case 0x25:  /* sll */
4081                         if (IS_IMM) { /* immediate */
4082                             simm = GET_FIELDs(insn, 20, 31);
4083                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4084                         } else { /* register */
4085                             cpu_tmp0 = tcg_temp_new();
4086                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4087                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4088                         }
4089                         gen_store_gpr(dc, rd, cpu_dst);
4090                         break;
4091                     case 0x26:  /* srl */
4092                         if (IS_IMM) { /* immediate */
4093                             simm = GET_FIELDs(insn, 20, 31);
4094                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4095                         } else { /* register */
4096                             cpu_tmp0 = tcg_temp_new();
4097                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4098                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4099                         }
4100                         gen_store_gpr(dc, rd, cpu_dst);
4101                         break;
4102                     case 0x27:  /* sra */
4103                         if (IS_IMM) { /* immediate */
4104                             simm = GET_FIELDs(insn, 20, 31);
4105                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4106                         } else { /* register */
4107                             cpu_tmp0 = tcg_temp_new();
4108                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4109                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4110                         }
4111                         gen_store_gpr(dc, rd, cpu_dst);
4112                         break;
4113 #endif
4114                     case 0x30:
4115                         {
4116                             cpu_tmp0 = tcg_temp_new();
4117                             switch(rd) {
4118                             case 0: /* wry */
4119                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4120                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4121                                 break;
4122 #ifndef TARGET_SPARC64
4123                             case 0x01 ... 0x0f: /* undefined in the
4124                                                    SPARCv8 manual, nop
4125                                                    on the microSPARC
4126                                                    II */
4127                             case 0x10 ... 0x1f: /* implementation-dependent
4128                                                    in the SPARCv8
4129                                                    manual, nop on the
4130                                                    microSPARC II */
4131                                 if ((rd == 0x13) && (dc->def->features &
4132                                                      CPU_FEATURE_POWERDOWN)) {
4133                                     /* LEON3 power-down */
4134                                     save_state(dc);
4135                                     gen_helper_power_down(cpu_env);
4136                                 }
4137                                 break;
4138 #else
4139                             case 0x2: /* V9 wrccr */
4140                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4141                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4142                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4143                                 dc->cc_op = CC_OP_FLAGS;
4144                                 break;
4145                             case 0x3: /* V9 wrasi */
4146                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4147                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4148                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4149                                                 offsetof(CPUSPARCState, asi));
4150                                 /*
4151                                  * End TB to notice changed ASI.
4152                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4153                                  * update DisasContext and not exit the TB.
4154                                  */
4155                                 save_state(dc);
4156                                 gen_op_next_insn();
4157                                 tcg_gen_lookup_and_goto_ptr();
4158                                 dc->base.is_jmp = DISAS_NORETURN;
4159                                 break;
4160                             case 0x6: /* V9 wrfprs */
4161                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4162                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4163                                 dc->fprs_dirty = 0;
4164                                 save_state(dc);
4165                                 gen_op_next_insn();
4166                                 tcg_gen_exit_tb(NULL, 0);
4167                                 dc->base.is_jmp = DISAS_NORETURN;
4168                                 break;
4169                             case 0xf: /* V9 sir, nop if user */
4170 #if !defined(CONFIG_USER_ONLY)
4171                                 if (supervisor(dc)) {
4172                                     ; // XXX
4173                                 }
4174 #endif
4175                                 break;
4176                             case 0x13: /* Graphics Status */
4177                                 if (gen_trap_ifnofpu(dc)) {
4178                                     goto jmp_insn;
4179                                 }
4180                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4181                                 break;
4182                             case 0x14: /* Softint set */
4183                                 if (!supervisor(dc))
4184                                     goto illegal_insn;
4185                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4186                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4187                                 break;
4188                             case 0x15: /* Softint clear */
4189                                 if (!supervisor(dc))
4190                                     goto illegal_insn;
4191                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4192                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4193                                 break;
4194                             case 0x16: /* Softint write */
4195                                 if (!supervisor(dc))
4196                                     goto illegal_insn;
4197                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4198                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4199                                 break;
4200                             case 0x17: /* Tick compare */
4201 #if !defined(CONFIG_USER_ONLY)
4202                                 if (!supervisor(dc))
4203                                     goto illegal_insn;
4204 #endif
4205                                 {
4206                                     TCGv_ptr r_tickptr;
4207 
4208                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4209                                                    cpu_src2);
4210                                     r_tickptr = tcg_temp_new_ptr();
4211                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4212                                                    offsetof(CPUSPARCState, tick));
4213                                     translator_io_start(&dc->base);
4214                                     gen_helper_tick_set_limit(r_tickptr,
4215                                                               cpu_tick_cmpr);
4216                                     /* End TB to handle timer interrupt */
4217                                     dc->base.is_jmp = DISAS_EXIT;
4218                                 }
4219                                 break;
4220                             case 0x18: /* System tick */
4221 #if !defined(CONFIG_USER_ONLY)
4222                                 if (!supervisor(dc))
4223                                     goto illegal_insn;
4224 #endif
4225                                 {
4226                                     TCGv_ptr r_tickptr;
4227 
4228                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4229                                                    cpu_src2);
4230                                     r_tickptr = tcg_temp_new_ptr();
4231                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4232                                                    offsetof(CPUSPARCState, stick));
4233                                     translator_io_start(&dc->base);
4234                                     gen_helper_tick_set_count(r_tickptr,
4235                                                               cpu_tmp0);
4236                                     /* End TB to handle timer interrupt */
4237                                     dc->base.is_jmp = DISAS_EXIT;
4238                                 }
4239                                 break;
4240                             case 0x19: /* System tick compare */
4241 #if !defined(CONFIG_USER_ONLY)
4242                                 if (!supervisor(dc))
4243                                     goto illegal_insn;
4244 #endif
4245                                 {
4246                                     TCGv_ptr r_tickptr;
4247 
4248                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4249                                                    cpu_src2);
4250                                     r_tickptr = tcg_temp_new_ptr();
4251                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4252                                                    offsetof(CPUSPARCState, stick));
4253                                     translator_io_start(&dc->base);
4254                                     gen_helper_tick_set_limit(r_tickptr,
4255                                                               cpu_stick_cmpr);
4256                                     /* End TB to handle timer interrupt */
4257                                     dc->base.is_jmp = DISAS_EXIT;
4258                                 }
4259                                 break;
4260 
4261                             case 0x10: /* Performance Control */
4262                             case 0x11: /* Performance Instrumentation
4263                                           Counter */
4264                             case 0x12: /* Dispatch Control */
4265 #endif
4266                             default:
4267                                 goto illegal_insn;
4268                             }
4269                         }
4270                         break;
4271 #if !defined(CONFIG_USER_ONLY)
4272                     case 0x31: /* wrpsr, V9 saved, restored */
4273                         {
4274                             if (!supervisor(dc))
4275                                 goto priv_insn;
4276 #ifdef TARGET_SPARC64
4277                             switch (rd) {
4278                             case 0:
4279                                 gen_helper_saved(cpu_env);
4280                                 break;
4281                             case 1:
4282                                 gen_helper_restored(cpu_env);
4283                                 break;
4284                             case 2: /* UA2005 allclean */
4285                             case 3: /* UA2005 otherw */
4286                             case 4: /* UA2005 normalw */
4287                             case 5: /* UA2005 invalw */
4288                                 // XXX
4289                             default:
4290                                 goto illegal_insn;
4291                             }
4292 #else
4293                             cpu_tmp0 = tcg_temp_new();
4294                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4295                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4296                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4297                             dc->cc_op = CC_OP_FLAGS;
4298                             save_state(dc);
4299                             gen_op_next_insn();
4300                             tcg_gen_exit_tb(NULL, 0);
4301                             dc->base.is_jmp = DISAS_NORETURN;
4302 #endif
4303                         }
4304                         break;
4305                     case 0x32: /* wrwim, V9 wrpr */
4306                         {
4307                             if (!supervisor(dc))
4308                                 goto priv_insn;
4309                             cpu_tmp0 = tcg_temp_new();
4310                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4311 #ifdef TARGET_SPARC64
4312                             switch (rd) {
4313                             case 0: // tpc
4314                                 {
4315                                     TCGv_ptr r_tsptr;
4316 
4317                                     r_tsptr = tcg_temp_new_ptr();
4318                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4319                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4320                                                   offsetof(trap_state, tpc));
4321                                 }
4322                                 break;
4323                             case 1: // tnpc
4324                                 {
4325                                     TCGv_ptr r_tsptr;
4326 
4327                                     r_tsptr = tcg_temp_new_ptr();
4328                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4329                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4330                                                   offsetof(trap_state, tnpc));
4331                                 }
4332                                 break;
4333                             case 2: // tstate
4334                                 {
4335                                     TCGv_ptr r_tsptr;
4336 
4337                                     r_tsptr = tcg_temp_new_ptr();
4338                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4339                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4340                                                   offsetof(trap_state,
4341                                                            tstate));
4342                                 }
4343                                 break;
4344                             case 3: // tt
4345                                 {
4346                                     TCGv_ptr r_tsptr;
4347 
4348                                     r_tsptr = tcg_temp_new_ptr();
4349                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4350                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4351                                                     offsetof(trap_state, tt));
4352                                 }
4353                                 break;
4354                             case 4: // tick
4355                                 {
4356                                     TCGv_ptr r_tickptr;
4357 
4358                                     r_tickptr = tcg_temp_new_ptr();
4359                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4360                                                    offsetof(CPUSPARCState, tick));
4361                                     translator_io_start(&dc->base);
4362                                     gen_helper_tick_set_count(r_tickptr,
4363                                                               cpu_tmp0);
4364                                     /* End TB to handle timer interrupt */
4365                                     dc->base.is_jmp = DISAS_EXIT;
4366                                 }
4367                                 break;
4368                             case 5: // tba
4369                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4370                                 break;
4371                             case 6: // pstate
4372                                 save_state(dc);
4373                                 if (translator_io_start(&dc->base)) {
4374                                     dc->base.is_jmp = DISAS_EXIT;
4375                                 }
4376                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4377                                 dc->npc = DYNAMIC_PC;
4378                                 break;
4379                             case 7: // tl
4380                                 save_state(dc);
4381                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4382                                                offsetof(CPUSPARCState, tl));
4383                                 dc->npc = DYNAMIC_PC;
4384                                 break;
4385                             case 8: // pil
4386                                 if (translator_io_start(&dc->base)) {
4387                                     dc->base.is_jmp = DISAS_EXIT;
4388                                 }
4389                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4390                                 break;
4391                             case 9: // cwp
4392                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4393                                 break;
4394                             case 10: // cansave
4395                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4396                                                 offsetof(CPUSPARCState,
4397                                                          cansave));
4398                                 break;
4399                             case 11: // canrestore
4400                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4401                                                 offsetof(CPUSPARCState,
4402                                                          canrestore));
4403                                 break;
4404                             case 12: // cleanwin
4405                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4406                                                 offsetof(CPUSPARCState,
4407                                                          cleanwin));
4408                                 break;
4409                             case 13: // otherwin
4410                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4411                                                 offsetof(CPUSPARCState,
4412                                                          otherwin));
4413                                 break;
4414                             case 14: // wstate
4415                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4416                                                 offsetof(CPUSPARCState,
4417                                                          wstate));
4418                                 break;
4419                             case 16: // UA2005 gl
4420                                 CHECK_IU_FEATURE(dc, GL);
4421                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4422                                 break;
4423                             case 26: // UA2005 strand status
4424                                 CHECK_IU_FEATURE(dc, HYPV);
4425                                 if (!hypervisor(dc))
4426                                     goto priv_insn;
4427                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4428                                 break;
4429                             default:
4430                                 goto illegal_insn;
4431                             }
4432 #else
4433                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4434                             if (dc->def->nwindows != 32) {
4435                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4436                                                 (1 << dc->def->nwindows) - 1);
4437                             }
4438 #endif
4439                         }
4440                         break;
4441                     case 0x33: /* wrtbr, UA2005 wrhpr */
4442                         {
4443 #ifndef TARGET_SPARC64
4444                             if (!supervisor(dc))
4445                                 goto priv_insn;
4446                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4447 #else
4448                             CHECK_IU_FEATURE(dc, HYPV);
4449                             if (!hypervisor(dc))
4450                                 goto priv_insn;
4451                             cpu_tmp0 = tcg_temp_new();
4452                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4453                             switch (rd) {
4454                             case 0: // hpstate
4455                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4456                                                offsetof(CPUSPARCState,
4457                                                         hpstate));
4458                                 save_state(dc);
4459                                 gen_op_next_insn();
4460                                 tcg_gen_exit_tb(NULL, 0);
4461                                 dc->base.is_jmp = DISAS_NORETURN;
4462                                 break;
4463                             case 1: // htstate
4464                                 // XXX gen_op_wrhtstate();
4465                                 break;
4466                             case 3: // hintp
4467                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4468                                 break;
4469                             case 5: // htba
4470                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4471                                 break;
4472                             case 31: // hstick_cmpr
4473                                 {
4474                                     TCGv_ptr r_tickptr;
4475 
4476                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4477                                     r_tickptr = tcg_temp_new_ptr();
4478                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4479                                                    offsetof(CPUSPARCState, hstick));
4480                                     translator_io_start(&dc->base);
4481                                     gen_helper_tick_set_limit(r_tickptr,
4482                                                               cpu_hstick_cmpr);
4483                                     /* End TB to handle timer interrupt */
4484                                     dc->base.is_jmp = DISAS_EXIT;
4485                                 }
4486                                 break;
4487                             case 6: // hver readonly
4488                             default:
4489                                 goto illegal_insn;
4490                             }
4491 #endif
4492                         }
4493                         break;
4494 #endif
4495 #ifdef TARGET_SPARC64
4496                     case 0x2c: /* V9 movcc */
4497                         {
4498                             int cc = GET_FIELD_SP(insn, 11, 12);
4499                             int cond = GET_FIELD_SP(insn, 14, 17);
4500                             DisasCompare cmp;
4501                             TCGv dst;
4502 
4503                             if (insn & (1 << 18)) {
4504                                 if (cc == 0) {
4505                                     gen_compare(&cmp, 0, cond, dc);
4506                                 } else if (cc == 2) {
4507                                     gen_compare(&cmp, 1, cond, dc);
4508                                 } else {
4509                                     goto illegal_insn;
4510                                 }
4511                             } else {
4512                                 gen_fcompare(&cmp, cc, cond);
4513                             }
4514 
4515                             /* The get_src2 above loaded the normal 13-bit
4516                                immediate field, not the 11-bit field we have
4517                                in movcc.  But it did handle the reg case.  */
4518                             if (IS_IMM) {
4519                                 simm = GET_FIELD_SPs(insn, 0, 10);
4520                                 tcg_gen_movi_tl(cpu_src2, simm);
4521                             }
4522 
4523                             dst = gen_load_gpr(dc, rd);
4524                             tcg_gen_movcond_tl(cmp.cond, dst,
4525                                                cmp.c1, cmp.c2,
4526                                                cpu_src2, dst);
4527                             gen_store_gpr(dc, rd, dst);
4528                             break;
4529                         }
4530                     case 0x2d: /* V9 sdivx */
4531                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4532                         gen_store_gpr(dc, rd, cpu_dst);
4533                         break;
4534                     case 0x2e: /* V9 popc */
4535                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4536                         gen_store_gpr(dc, rd, cpu_dst);
4537                         break;
4538                     case 0x2f: /* V9 movr */
4539                         {
4540                             int cond = GET_FIELD_SP(insn, 10, 12);
4541                             DisasCompare cmp;
4542                             TCGv dst;
4543 
4544                             gen_compare_reg(&cmp, cond, cpu_src1);
4545 
4546                             /* The get_src2 above loaded the normal 13-bit
4547                                immediate field, not the 10-bit field we have
4548                                in movr.  But it did handle the reg case.  */
4549                             if (IS_IMM) {
4550                                 simm = GET_FIELD_SPs(insn, 0, 9);
4551                                 tcg_gen_movi_tl(cpu_src2, simm);
4552                             }
4553 
4554                             dst = gen_load_gpr(dc, rd);
4555                             tcg_gen_movcond_tl(cmp.cond, dst,
4556                                                cmp.c1, cmp.c2,
4557                                                cpu_src2, dst);
4558                             gen_store_gpr(dc, rd, dst);
4559                             break;
4560                         }
4561 #endif
4562                     default:
4563                         goto illegal_insn;
4564                     }
4565                 }
4566             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4567 #ifdef TARGET_SPARC64
4568                 int opf = GET_FIELD_SP(insn, 5, 13);
4569                 rs1 = GET_FIELD(insn, 13, 17);
4570                 rs2 = GET_FIELD(insn, 27, 31);
4571                 if (gen_trap_ifnofpu(dc)) {
4572                     goto jmp_insn;
4573                 }
4574 
4575                 switch (opf) {
4576                 case 0x000: /* VIS I edge8cc */
4577                     CHECK_FPU_FEATURE(dc, VIS1);
4578                     cpu_src1 = gen_load_gpr(dc, rs1);
4579                     cpu_src2 = gen_load_gpr(dc, rs2);
4580                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4581                     gen_store_gpr(dc, rd, cpu_dst);
4582                     break;
4583                 case 0x001: /* VIS II edge8n */
4584                     CHECK_FPU_FEATURE(dc, VIS2);
4585                     cpu_src1 = gen_load_gpr(dc, rs1);
4586                     cpu_src2 = gen_load_gpr(dc, rs2);
4587                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4588                     gen_store_gpr(dc, rd, cpu_dst);
4589                     break;
4590                 case 0x002: /* VIS I edge8lcc */
4591                     CHECK_FPU_FEATURE(dc, VIS1);
4592                     cpu_src1 = gen_load_gpr(dc, rs1);
4593                     cpu_src2 = gen_load_gpr(dc, rs2);
4594                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4595                     gen_store_gpr(dc, rd, cpu_dst);
4596                     break;
4597                 case 0x003: /* VIS II edge8ln */
4598                     CHECK_FPU_FEATURE(dc, VIS2);
4599                     cpu_src1 = gen_load_gpr(dc, rs1);
4600                     cpu_src2 = gen_load_gpr(dc, rs2);
4601                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4602                     gen_store_gpr(dc, rd, cpu_dst);
4603                     break;
4604                 case 0x004: /* VIS I edge16cc */
4605                     CHECK_FPU_FEATURE(dc, VIS1);
4606                     cpu_src1 = gen_load_gpr(dc, rs1);
4607                     cpu_src2 = gen_load_gpr(dc, rs2);
4608                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4609                     gen_store_gpr(dc, rd, cpu_dst);
4610                     break;
4611                 case 0x005: /* VIS II edge16n */
4612                     CHECK_FPU_FEATURE(dc, VIS2);
4613                     cpu_src1 = gen_load_gpr(dc, rs1);
4614                     cpu_src2 = gen_load_gpr(dc, rs2);
4615                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4616                     gen_store_gpr(dc, rd, cpu_dst);
4617                     break;
4618                 case 0x006: /* VIS I edge16lcc */
4619                     CHECK_FPU_FEATURE(dc, VIS1);
4620                     cpu_src1 = gen_load_gpr(dc, rs1);
4621                     cpu_src2 = gen_load_gpr(dc, rs2);
4622                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4623                     gen_store_gpr(dc, rd, cpu_dst);
4624                     break;
4625                 case 0x007: /* VIS II edge16ln */
4626                     CHECK_FPU_FEATURE(dc, VIS2);
4627                     cpu_src1 = gen_load_gpr(dc, rs1);
4628                     cpu_src2 = gen_load_gpr(dc, rs2);
4629                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4630                     gen_store_gpr(dc, rd, cpu_dst);
4631                     break;
4632                 case 0x008: /* VIS I edge32cc */
4633                     CHECK_FPU_FEATURE(dc, VIS1);
4634                     cpu_src1 = gen_load_gpr(dc, rs1);
4635                     cpu_src2 = gen_load_gpr(dc, rs2);
4636                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4637                     gen_store_gpr(dc, rd, cpu_dst);
4638                     break;
4639                 case 0x009: /* VIS II edge32n */
4640                     CHECK_FPU_FEATURE(dc, VIS2);
4641                     cpu_src1 = gen_load_gpr(dc, rs1);
4642                     cpu_src2 = gen_load_gpr(dc, rs2);
4643                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4644                     gen_store_gpr(dc, rd, cpu_dst);
4645                     break;
4646                 case 0x00a: /* VIS I edge32lcc */
4647                     CHECK_FPU_FEATURE(dc, VIS1);
4648                     cpu_src1 = gen_load_gpr(dc, rs1);
4649                     cpu_src2 = gen_load_gpr(dc, rs2);
4650                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4651                     gen_store_gpr(dc, rd, cpu_dst);
4652                     break;
4653                 case 0x00b: /* VIS II edge32ln */
4654                     CHECK_FPU_FEATURE(dc, VIS2);
4655                     cpu_src1 = gen_load_gpr(dc, rs1);
4656                     cpu_src2 = gen_load_gpr(dc, rs2);
4657                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4658                     gen_store_gpr(dc, rd, cpu_dst);
4659                     break;
4660                 case 0x010: /* VIS I array8 */
4661                     CHECK_FPU_FEATURE(dc, VIS1);
4662                     cpu_src1 = gen_load_gpr(dc, rs1);
4663                     cpu_src2 = gen_load_gpr(dc, rs2);
4664                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4665                     gen_store_gpr(dc, rd, cpu_dst);
4666                     break;
4667                 case 0x012: /* VIS I array16 */
4668                     CHECK_FPU_FEATURE(dc, VIS1);
4669                     cpu_src1 = gen_load_gpr(dc, rs1);
4670                     cpu_src2 = gen_load_gpr(dc, rs2);
4671                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4672                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4673                     gen_store_gpr(dc, rd, cpu_dst);
4674                     break;
4675                 case 0x014: /* VIS I array32 */
4676                     CHECK_FPU_FEATURE(dc, VIS1);
4677                     cpu_src1 = gen_load_gpr(dc, rs1);
4678                     cpu_src2 = gen_load_gpr(dc, rs2);
4679                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4680                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4681                     gen_store_gpr(dc, rd, cpu_dst);
4682                     break;
4683                 case 0x018: /* VIS I alignaddr */
4684                     CHECK_FPU_FEATURE(dc, VIS1);
4685                     cpu_src1 = gen_load_gpr(dc, rs1);
4686                     cpu_src2 = gen_load_gpr(dc, rs2);
4687                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4688                     gen_store_gpr(dc, rd, cpu_dst);
4689                     break;
4690                 case 0x01a: /* VIS I alignaddrl */
4691                     CHECK_FPU_FEATURE(dc, VIS1);
4692                     cpu_src1 = gen_load_gpr(dc, rs1);
4693                     cpu_src2 = gen_load_gpr(dc, rs2);
4694                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4695                     gen_store_gpr(dc, rd, cpu_dst);
4696                     break;
4697                 case 0x019: /* VIS II bmask */
4698                     CHECK_FPU_FEATURE(dc, VIS2);
4699                     cpu_src1 = gen_load_gpr(dc, rs1);
4700                     cpu_src2 = gen_load_gpr(dc, rs2);
4701                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4702                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4703                     gen_store_gpr(dc, rd, cpu_dst);
4704                     break;
4705                 case 0x020: /* VIS I fcmple16 */
4706                     CHECK_FPU_FEATURE(dc, VIS1);
4707                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4708                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4709                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4710                     gen_store_gpr(dc, rd, cpu_dst);
4711                     break;
4712                 case 0x022: /* VIS I fcmpne16 */
4713                     CHECK_FPU_FEATURE(dc, VIS1);
4714                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4715                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4716                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4717                     gen_store_gpr(dc, rd, cpu_dst);
4718                     break;
4719                 case 0x024: /* VIS I fcmple32 */
4720                     CHECK_FPU_FEATURE(dc, VIS1);
4721                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4722                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4723                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4724                     gen_store_gpr(dc, rd, cpu_dst);
4725                     break;
4726                 case 0x026: /* VIS I fcmpne32 */
4727                     CHECK_FPU_FEATURE(dc, VIS1);
4728                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4729                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4730                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4731                     gen_store_gpr(dc, rd, cpu_dst);
4732                     break;
4733                 case 0x028: /* VIS I fcmpgt16 */
4734                     CHECK_FPU_FEATURE(dc, VIS1);
4735                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4736                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4737                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4738                     gen_store_gpr(dc, rd, cpu_dst);
4739                     break;
4740                 case 0x02a: /* VIS I fcmpeq16 */
4741                     CHECK_FPU_FEATURE(dc, VIS1);
4742                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4743                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4744                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4745                     gen_store_gpr(dc, rd, cpu_dst);
4746                     break;
4747                 case 0x02c: /* VIS I fcmpgt32 */
4748                     CHECK_FPU_FEATURE(dc, VIS1);
4749                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4750                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4751                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4752                     gen_store_gpr(dc, rd, cpu_dst);
4753                     break;
4754                 case 0x02e: /* VIS I fcmpeq32 */
4755                     CHECK_FPU_FEATURE(dc, VIS1);
4756                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4757                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4758                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4759                     gen_store_gpr(dc, rd, cpu_dst);
4760                     break;
4761                 case 0x031: /* VIS I fmul8x16 */
4762                     CHECK_FPU_FEATURE(dc, VIS1);
4763                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4764                     break;
4765                 case 0x033: /* VIS I fmul8x16au */
4766                     CHECK_FPU_FEATURE(dc, VIS1);
4767                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4768                     break;
4769                 case 0x035: /* VIS I fmul8x16al */
4770                     CHECK_FPU_FEATURE(dc, VIS1);
4771                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4772                     break;
4773                 case 0x036: /* VIS I fmul8sux16 */
4774                     CHECK_FPU_FEATURE(dc, VIS1);
4775                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4776                     break;
4777                 case 0x037: /* VIS I fmul8ulx16 */
4778                     CHECK_FPU_FEATURE(dc, VIS1);
4779                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4780                     break;
4781                 case 0x038: /* VIS I fmuld8sux16 */
4782                     CHECK_FPU_FEATURE(dc, VIS1);
4783                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4784                     break;
4785                 case 0x039: /* VIS I fmuld8ulx16 */
4786                     CHECK_FPU_FEATURE(dc, VIS1);
4787                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4788                     break;
4789                 case 0x03a: /* VIS I fpack32 */
4790                     CHECK_FPU_FEATURE(dc, VIS1);
4791                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4792                     break;
4793                 case 0x03b: /* VIS I fpack16 */
4794                     CHECK_FPU_FEATURE(dc, VIS1);
4795                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4796                     cpu_dst_32 = gen_dest_fpr_F(dc);
4797                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4798                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4799                     break;
4800                 case 0x03d: /* VIS I fpackfix */
4801                     CHECK_FPU_FEATURE(dc, VIS1);
4802                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4803                     cpu_dst_32 = gen_dest_fpr_F(dc);
4804                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4805                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4806                     break;
4807                 case 0x03e: /* VIS I pdist */
4808                     CHECK_FPU_FEATURE(dc, VIS1);
4809                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4810                     break;
4811                 case 0x048: /* VIS I faligndata */
4812                     CHECK_FPU_FEATURE(dc, VIS1);
4813                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4814                     break;
4815                 case 0x04b: /* VIS I fpmerge */
4816                     CHECK_FPU_FEATURE(dc, VIS1);
4817                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4818                     break;
4819                 case 0x04c: /* VIS II bshuffle */
4820                     CHECK_FPU_FEATURE(dc, VIS2);
4821                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4822                     break;
4823                 case 0x04d: /* VIS I fexpand */
4824                     CHECK_FPU_FEATURE(dc, VIS1);
4825                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4826                     break;
4827                 case 0x050: /* VIS I fpadd16 */
4828                     CHECK_FPU_FEATURE(dc, VIS1);
4829                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4830                     break;
4831                 case 0x051: /* VIS I fpadd16s */
4832                     CHECK_FPU_FEATURE(dc, VIS1);
4833                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4834                     break;
4835                 case 0x052: /* VIS I fpadd32 */
4836                     CHECK_FPU_FEATURE(dc, VIS1);
4837                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4838                     break;
4839                 case 0x053: /* VIS I fpadd32s */
4840                     CHECK_FPU_FEATURE(dc, VIS1);
4841                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4842                     break;
4843                 case 0x054: /* VIS I fpsub16 */
4844                     CHECK_FPU_FEATURE(dc, VIS1);
4845                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4846                     break;
4847                 case 0x055: /* VIS I fpsub16s */
4848                     CHECK_FPU_FEATURE(dc, VIS1);
4849                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4850                     break;
4851                 case 0x056: /* VIS I fpsub32 */
4852                     CHECK_FPU_FEATURE(dc, VIS1);
4853                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4854                     break;
4855                 case 0x057: /* VIS I fpsub32s */
4856                     CHECK_FPU_FEATURE(dc, VIS1);
4857                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4858                     break;
4859                 case 0x060: /* VIS I fzero */
4860                     CHECK_FPU_FEATURE(dc, VIS1);
4861                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4862                     tcg_gen_movi_i64(cpu_dst_64, 0);
4863                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4864                     break;
4865                 case 0x061: /* VIS I fzeros */
4866                     CHECK_FPU_FEATURE(dc, VIS1);
4867                     cpu_dst_32 = gen_dest_fpr_F(dc);
4868                     tcg_gen_movi_i32(cpu_dst_32, 0);
4869                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4870                     break;
4871                 case 0x062: /* VIS I fnor */
4872                     CHECK_FPU_FEATURE(dc, VIS1);
4873                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4874                     break;
4875                 case 0x063: /* VIS I fnors */
4876                     CHECK_FPU_FEATURE(dc, VIS1);
4877                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4878                     break;
4879                 case 0x064: /* VIS I fandnot2 */
4880                     CHECK_FPU_FEATURE(dc, VIS1);
4881                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4882                     break;
4883                 case 0x065: /* VIS I fandnot2s */
4884                     CHECK_FPU_FEATURE(dc, VIS1);
4885                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4886                     break;
4887                 case 0x066: /* VIS I fnot2 */
4888                     CHECK_FPU_FEATURE(dc, VIS1);
4889                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4890                     break;
4891                 case 0x067: /* VIS I fnot2s */
4892                     CHECK_FPU_FEATURE(dc, VIS1);
4893                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4894                     break;
4895                 case 0x068: /* VIS I fandnot1 */
4896                     CHECK_FPU_FEATURE(dc, VIS1);
4897                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4898                     break;
4899                 case 0x069: /* VIS I fandnot1s */
4900                     CHECK_FPU_FEATURE(dc, VIS1);
4901                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4902                     break;
4903                 case 0x06a: /* VIS I fnot1 */
4904                     CHECK_FPU_FEATURE(dc, VIS1);
4905                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4906                     break;
4907                 case 0x06b: /* VIS I fnot1s */
4908                     CHECK_FPU_FEATURE(dc, VIS1);
4909                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4910                     break;
4911                 case 0x06c: /* VIS I fxor */
4912                     CHECK_FPU_FEATURE(dc, VIS1);
4913                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4914                     break;
4915                 case 0x06d: /* VIS I fxors */
4916                     CHECK_FPU_FEATURE(dc, VIS1);
4917                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4918                     break;
4919                 case 0x06e: /* VIS I fnand */
4920                     CHECK_FPU_FEATURE(dc, VIS1);
4921                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4922                     break;
4923                 case 0x06f: /* VIS I fnands */
4924                     CHECK_FPU_FEATURE(dc, VIS1);
4925                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4926                     break;
4927                 case 0x070: /* VIS I fand */
4928                     CHECK_FPU_FEATURE(dc, VIS1);
4929                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4930                     break;
4931                 case 0x071: /* VIS I fands */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4934                     break;
4935                 case 0x072: /* VIS I fxnor */
4936                     CHECK_FPU_FEATURE(dc, VIS1);
4937                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4938                     break;
4939                 case 0x073: /* VIS I fxnors */
4940                     CHECK_FPU_FEATURE(dc, VIS1);
4941                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4942                     break;
4943                 case 0x074: /* VIS I fsrc1 */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4946                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4947                     break;
4948                 case 0x075: /* VIS I fsrc1s */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4951                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4952                     break;
4953                 case 0x076: /* VIS I fornot2 */
4954                     CHECK_FPU_FEATURE(dc, VIS1);
4955                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4956                     break;
4957                 case 0x077: /* VIS I fornot2s */
4958                     CHECK_FPU_FEATURE(dc, VIS1);
4959                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4960                     break;
4961                 case 0x078: /* VIS I fsrc2 */
4962                     CHECK_FPU_FEATURE(dc, VIS1);
4963                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4964                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4965                     break;
4966                 case 0x079: /* VIS I fsrc2s */
4967                     CHECK_FPU_FEATURE(dc, VIS1);
4968                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4969                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4970                     break;
4971                 case 0x07a: /* VIS I fornot1 */
4972                     CHECK_FPU_FEATURE(dc, VIS1);
4973                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4974                     break;
4975                 case 0x07b: /* VIS I fornot1s */
4976                     CHECK_FPU_FEATURE(dc, VIS1);
4977                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4978                     break;
4979                 case 0x07c: /* VIS I for */
4980                     CHECK_FPU_FEATURE(dc, VIS1);
4981                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4982                     break;
4983                 case 0x07d: /* VIS I fors */
4984                     CHECK_FPU_FEATURE(dc, VIS1);
4985                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4986                     break;
4987                 case 0x07e: /* VIS I fone */
4988                     CHECK_FPU_FEATURE(dc, VIS1);
4989                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4990                     tcg_gen_movi_i64(cpu_dst_64, -1);
4991                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4992                     break;
4993                 case 0x07f: /* VIS I fones */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     cpu_dst_32 = gen_dest_fpr_F(dc);
4996                     tcg_gen_movi_i32(cpu_dst_32, -1);
4997                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4998                     break;
4999                 case 0x080: /* VIS I shutdown */
5000                 case 0x081: /* VIS II siam */
5001                     // XXX
5002                     goto illegal_insn;
5003                 default:
5004                     goto illegal_insn;
5005                 }
5006 #else
5007                 goto ncp_insn;
5008 #endif
5009             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5010 #ifdef TARGET_SPARC64
5011                 goto illegal_insn;
5012 #else
5013                 goto ncp_insn;
5014 #endif
5015 #ifdef TARGET_SPARC64
5016             } else if (xop == 0x39) { /* V9 return */
5017                 save_state(dc);
5018                 cpu_src1 = get_src1(dc, insn);
5019                 cpu_tmp0 = tcg_temp_new();
5020                 if (IS_IMM) {   /* immediate */
5021                     simm = GET_FIELDs(insn, 19, 31);
5022                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5023                 } else {                /* register */
5024                     rs2 = GET_FIELD(insn, 27, 31);
5025                     if (rs2) {
5026                         cpu_src2 = gen_load_gpr(dc, rs2);
5027                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5028                     } else {
5029                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5030                     }
5031                 }
5032                 gen_helper_restore(cpu_env);
5033                 gen_mov_pc_npc(dc);
5034                 gen_check_align(cpu_tmp0, 3);
5035                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5036                 dc->npc = DYNAMIC_PC_LOOKUP;
5037                 goto jmp_insn;
5038 #endif
5039             } else {
5040                 cpu_src1 = get_src1(dc, insn);
5041                 cpu_tmp0 = tcg_temp_new();
5042                 if (IS_IMM) {   /* immediate */
5043                     simm = GET_FIELDs(insn, 19, 31);
5044                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5045                 } else {                /* register */
5046                     rs2 = GET_FIELD(insn, 27, 31);
5047                     if (rs2) {
5048                         cpu_src2 = gen_load_gpr(dc, rs2);
5049                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5050                     } else {
5051                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5052                     }
5053                 }
5054                 switch (xop) {
5055                 case 0x38:      /* jmpl */
5056                     {
5057                         TCGv t = gen_dest_gpr(dc, rd);
5058                         tcg_gen_movi_tl(t, dc->pc);
5059                         gen_store_gpr(dc, rd, t);
5060 
5061                         gen_mov_pc_npc(dc);
5062                         gen_check_align(cpu_tmp0, 3);
5063                         gen_address_mask(dc, cpu_tmp0);
5064                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5065                         dc->npc = DYNAMIC_PC_LOOKUP;
5066                     }
5067                     goto jmp_insn;
5068 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5069                 case 0x39:      /* rett, V9 return */
5070                     {
5071                         if (!supervisor(dc))
5072                             goto priv_insn;
5073                         gen_mov_pc_npc(dc);
5074                         gen_check_align(cpu_tmp0, 3);
5075                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5076                         dc->npc = DYNAMIC_PC;
5077                         gen_helper_rett(cpu_env);
5078                     }
5079                     goto jmp_insn;
5080 #endif
5081                 case 0x3b: /* flush */
5082                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5083                         goto unimp_flush;
5084                     /* nop */
5085                     break;
5086                 case 0x3c:      /* save */
5087                     gen_helper_save(cpu_env);
5088                     gen_store_gpr(dc, rd, cpu_tmp0);
5089                     break;
5090                 case 0x3d:      /* restore */
5091                     gen_helper_restore(cpu_env);
5092                     gen_store_gpr(dc, rd, cpu_tmp0);
5093                     break;
5094 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5095                 case 0x3e:      /* V9 done/retry */
5096                     {
5097                         switch (rd) {
5098                         case 0:
5099                             if (!supervisor(dc))
5100                                 goto priv_insn;
5101                             dc->npc = DYNAMIC_PC;
5102                             dc->pc = DYNAMIC_PC;
5103                             translator_io_start(&dc->base);
5104                             gen_helper_done(cpu_env);
5105                             goto jmp_insn;
5106                         case 1:
5107                             if (!supervisor(dc))
5108                                 goto priv_insn;
5109                             dc->npc = DYNAMIC_PC;
5110                             dc->pc = DYNAMIC_PC;
5111                             translator_io_start(&dc->base);
5112                             gen_helper_retry(cpu_env);
5113                             goto jmp_insn;
5114                         default:
5115                             goto illegal_insn;
5116                         }
5117                     }
5118                     break;
5119 #endif
5120                 default:
5121                     goto illegal_insn;
5122                 }
5123             }
5124             break;
5125         }
5126         break;
5127     case 3:                     /* load/store instructions */
5128         {
5129             unsigned int xop = GET_FIELD(insn, 7, 12);
5130             /* ??? gen_address_mask prevents us from using a source
5131                register directly.  Always generate a temporary.  */
5132             TCGv cpu_addr = tcg_temp_new();
5133 
5134             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5135             if (xop == 0x3c || xop == 0x3e) {
5136                 /* V9 casa/casxa : no offset */
5137             } else if (IS_IMM) {     /* immediate */
5138                 simm = GET_FIELDs(insn, 19, 31);
5139                 if (simm != 0) {
5140                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5141                 }
5142             } else {            /* register */
5143                 rs2 = GET_FIELD(insn, 27, 31);
5144                 if (rs2 != 0) {
5145                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5146                 }
5147             }
5148             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5149                 (xop > 0x17 && xop <= 0x1d ) ||
5150                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5151                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5152 
5153                 switch (xop) {
5154                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5155                     gen_address_mask(dc, cpu_addr);
5156                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5157                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5158                     break;
5159                 case 0x1:       /* ldub, load unsigned byte */
5160                     gen_address_mask(dc, cpu_addr);
5161                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5162                                        dc->mem_idx, MO_UB);
5163                     break;
5164                 case 0x2:       /* lduh, load unsigned halfword */
5165                     gen_address_mask(dc, cpu_addr);
5166                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5167                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5168                     break;
5169                 case 0x3:       /* ldd, load double word */
5170                     if (rd & 1)
5171                         goto illegal_insn;
5172                     else {
5173                         TCGv_i64 t64;
5174 
5175                         gen_address_mask(dc, cpu_addr);
5176                         t64 = tcg_temp_new_i64();
5177                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5178                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5179                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5180                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5181                         gen_store_gpr(dc, rd + 1, cpu_val);
5182                         tcg_gen_shri_i64(t64, t64, 32);
5183                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5184                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5185                     }
5186                     break;
5187                 case 0x9:       /* ldsb, load signed byte */
5188                     gen_address_mask(dc, cpu_addr);
5189                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5190                     break;
5191                 case 0xa:       /* ldsh, load signed halfword */
5192                     gen_address_mask(dc, cpu_addr);
5193                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5194                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5195                     break;
5196                 case 0xd:       /* ldstub */
5197                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5198                     break;
5199                 case 0x0f:
5200                     /* swap, swap register with memory. Also atomically */
5201                     CHECK_IU_FEATURE(dc, SWAP);
5202                     cpu_src1 = gen_load_gpr(dc, rd);
5203                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5204                              dc->mem_idx, MO_TEUL);
5205                     break;
5206 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5207                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5208                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5209                     break;
5210                 case 0x11:      /* lduba, load unsigned byte alternate */
5211                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5212                     break;
5213                 case 0x12:      /* lduha, load unsigned halfword alternate */
5214                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5215                     break;
5216                 case 0x13:      /* ldda, load double word alternate */
5217                     if (rd & 1) {
5218                         goto illegal_insn;
5219                     }
5220                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5221                     goto skip_move;
5222                 case 0x19:      /* ldsba, load signed byte alternate */
5223                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5224                     break;
5225                 case 0x1a:      /* ldsha, load signed halfword alternate */
5226                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5227                     break;
5228                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5229                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5230                     break;
5231                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5232                                    atomically */
5233                     CHECK_IU_FEATURE(dc, SWAP);
5234                     cpu_src1 = gen_load_gpr(dc, rd);
5235                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5236                     break;
5237 
5238 #ifndef TARGET_SPARC64
5239                 case 0x30: /* ldc */
5240                 case 0x31: /* ldcsr */
5241                 case 0x33: /* lddc */
5242                     goto ncp_insn;
5243 #endif
5244 #endif
5245 #ifdef TARGET_SPARC64
5246                 case 0x08: /* V9 ldsw */
5247                     gen_address_mask(dc, cpu_addr);
5248                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5249                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5250                     break;
5251                 case 0x0b: /* V9 ldx */
5252                     gen_address_mask(dc, cpu_addr);
5253                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5254                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5255                     break;
5256                 case 0x18: /* V9 ldswa */
5257                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5258                     break;
5259                 case 0x1b: /* V9 ldxa */
5260                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5261                     break;
5262                 case 0x2d: /* V9 prefetch, no effect */
5263                     goto skip_move;
5264                 case 0x30: /* V9 ldfa */
5265                     if (gen_trap_ifnofpu(dc)) {
5266                         goto jmp_insn;
5267                     }
5268                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5269                     gen_update_fprs_dirty(dc, rd);
5270                     goto skip_move;
5271                 case 0x33: /* V9 lddfa */
5272                     if (gen_trap_ifnofpu(dc)) {
5273                         goto jmp_insn;
5274                     }
5275                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5276                     gen_update_fprs_dirty(dc, DFPREG(rd));
5277                     goto skip_move;
5278                 case 0x3d: /* V9 prefetcha, no effect */
5279                     goto skip_move;
5280                 case 0x32: /* V9 ldqfa */
5281                     CHECK_FPU_FEATURE(dc, FLOAT128);
5282                     if (gen_trap_ifnofpu(dc)) {
5283                         goto jmp_insn;
5284                     }
5285                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5286                     gen_update_fprs_dirty(dc, QFPREG(rd));
5287                     goto skip_move;
5288 #endif
5289                 default:
5290                     goto illegal_insn;
5291                 }
5292                 gen_store_gpr(dc, rd, cpu_val);
5293 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5294             skip_move: ;
5295 #endif
5296             } else if (xop >= 0x20 && xop < 0x24) {
5297                 if (gen_trap_ifnofpu(dc)) {
5298                     goto jmp_insn;
5299                 }
5300                 switch (xop) {
5301                 case 0x20:      /* ldf, load fpreg */
5302                     gen_address_mask(dc, cpu_addr);
5303                     cpu_dst_32 = gen_dest_fpr_F(dc);
5304                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5305                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5306                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5307                     break;
5308                 case 0x21:      /* ldfsr, V9 ldxfsr */
5309 #ifdef TARGET_SPARC64
5310                     gen_address_mask(dc, cpu_addr);
5311                     if (rd == 1) {
5312                         TCGv_i64 t64 = tcg_temp_new_i64();
5313                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5314                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5315                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5316                         break;
5317                     }
5318 #endif
5319                     cpu_dst_32 = tcg_temp_new_i32();
5320                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5321                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5322                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5323                     break;
5324                 case 0x22:      /* ldqf, load quad fpreg */
5325                     CHECK_FPU_FEATURE(dc, FLOAT128);
5326                     gen_address_mask(dc, cpu_addr);
5327                     cpu_src1_64 = tcg_temp_new_i64();
5328                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5329                                         MO_TEUQ | MO_ALIGN_4);
5330                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5331                     cpu_src2_64 = tcg_temp_new_i64();
5332                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5333                                         MO_TEUQ | MO_ALIGN_4);
5334                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5335                     break;
5336                 case 0x23:      /* lddf, load double fpreg */
5337                     gen_address_mask(dc, cpu_addr);
5338                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5339                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5340                                         MO_TEUQ | MO_ALIGN_4);
5341                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5342                     break;
5343                 default:
5344                     goto illegal_insn;
5345                 }
5346             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5347                        xop == 0xe || xop == 0x1e) {
5348                 TCGv cpu_val = gen_load_gpr(dc, rd);
5349 
5350                 switch (xop) {
5351                 case 0x4: /* st, store word */
5352                     gen_address_mask(dc, cpu_addr);
5353                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5354                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5355                     break;
5356                 case 0x5: /* stb, store byte */
5357                     gen_address_mask(dc, cpu_addr);
5358                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5359                     break;
5360                 case 0x6: /* sth, store halfword */
5361                     gen_address_mask(dc, cpu_addr);
5362                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5363                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5364                     break;
5365                 case 0x7: /* std, store double word */
5366                     if (rd & 1)
5367                         goto illegal_insn;
5368                     else {
5369                         TCGv_i64 t64;
5370                         TCGv lo;
5371 
5372                         gen_address_mask(dc, cpu_addr);
5373                         lo = gen_load_gpr(dc, rd + 1);
5374                         t64 = tcg_temp_new_i64();
5375                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5376                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5377                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5378                     }
5379                     break;
5380 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5381                 case 0x14: /* sta, V9 stwa, store word alternate */
5382                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5383                     break;
5384                 case 0x15: /* stba, store byte alternate */
5385                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5386                     break;
5387                 case 0x16: /* stha, store halfword alternate */
5388                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5389                     break;
5390                 case 0x17: /* stda, store double word alternate */
5391                     if (rd & 1) {
5392                         goto illegal_insn;
5393                     }
5394                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5395                     break;
5396 #endif
5397 #ifdef TARGET_SPARC64
5398                 case 0x0e: /* V9 stx */
5399                     gen_address_mask(dc, cpu_addr);
5400                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5401                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5402                     break;
5403                 case 0x1e: /* V9 stxa */
5404                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5405                     break;
5406 #endif
5407                 default:
5408                     goto illegal_insn;
5409                 }
5410             } else if (xop > 0x23 && xop < 0x28) {
5411                 if (gen_trap_ifnofpu(dc)) {
5412                     goto jmp_insn;
5413                 }
5414                 switch (xop) {
5415                 case 0x24: /* stf, store fpreg */
5416                     gen_address_mask(dc, cpu_addr);
5417                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5418                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5419                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5420                     break;
5421                 case 0x25: /* stfsr, V9 stxfsr */
5422                     {
5423 #ifdef TARGET_SPARC64
5424                         gen_address_mask(dc, cpu_addr);
5425                         if (rd == 1) {
5426                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5427                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5428                             break;
5429                         }
5430 #endif
5431                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5432                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5433                     }
5434                     break;
5435                 case 0x26:
5436 #ifdef TARGET_SPARC64
5437                     /* V9 stqf, store quad fpreg */
5438                     CHECK_FPU_FEATURE(dc, FLOAT128);
5439                     gen_address_mask(dc, cpu_addr);
5440                     /* ??? While stqf only requires 4-byte alignment, it is
5441                        legal for the cpu to signal the unaligned exception.
5442                        The OS trap handler is then required to fix it up.
5443                        For qemu, this avoids having to probe the second page
5444                        before performing the first write.  */
5445                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5446                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5447                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5448                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5449                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5450                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5451                                         dc->mem_idx, MO_TEUQ);
5452                     break;
5453 #else /* !TARGET_SPARC64 */
5454                     /* stdfq, store floating point queue */
5455 #if defined(CONFIG_USER_ONLY)
5456                     goto illegal_insn;
5457 #else
5458                     if (!supervisor(dc))
5459                         goto priv_insn;
5460                     if (gen_trap_ifnofpu(dc)) {
5461                         goto jmp_insn;
5462                     }
5463                     goto nfq_insn;
5464 #endif
5465 #endif
5466                 case 0x27: /* stdf, store double fpreg */
5467                     gen_address_mask(dc, cpu_addr);
5468                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5469                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5470                                         MO_TEUQ | MO_ALIGN_4);
5471                     break;
5472                 default:
5473                     goto illegal_insn;
5474                 }
5475             } else if (xop > 0x33 && xop < 0x3f) {
5476                 switch (xop) {
5477 #ifdef TARGET_SPARC64
5478                 case 0x34: /* V9 stfa */
5479                     if (gen_trap_ifnofpu(dc)) {
5480                         goto jmp_insn;
5481                     }
5482                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5483                     break;
5484                 case 0x36: /* V9 stqfa */
5485                     {
5486                         CHECK_FPU_FEATURE(dc, FLOAT128);
5487                         if (gen_trap_ifnofpu(dc)) {
5488                             goto jmp_insn;
5489                         }
5490                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5491                     }
5492                     break;
5493                 case 0x37: /* V9 stdfa */
5494                     if (gen_trap_ifnofpu(dc)) {
5495                         goto jmp_insn;
5496                     }
5497                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5498                     break;
5499                 case 0x3e: /* V9 casxa */
5500                     rs2 = GET_FIELD(insn, 27, 31);
5501                     cpu_src2 = gen_load_gpr(dc, rs2);
5502                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5503                     break;
5504 #else
5505                 case 0x34: /* stc */
5506                 case 0x35: /* stcsr */
5507                 case 0x36: /* stdcq */
5508                 case 0x37: /* stdc */
5509                     goto ncp_insn;
5510 #endif
5511 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5512                 case 0x3c: /* V9 or LEON3 casa */
5513 #ifndef TARGET_SPARC64
5514                     CHECK_IU_FEATURE(dc, CASA);
5515 #endif
5516                     rs2 = GET_FIELD(insn, 27, 31);
5517                     cpu_src2 = gen_load_gpr(dc, rs2);
5518                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5519                     break;
5520 #endif
5521                 default:
5522                     goto illegal_insn;
5523                 }
5524             } else {
5525                 goto illegal_insn;
5526             }
5527         }
5528         break;
5529     }
5530     /* default case for non jump instructions */
5531     if (dc->npc & 3) {
5532         switch (dc->npc) {
5533         case DYNAMIC_PC:
5534         case DYNAMIC_PC_LOOKUP:
5535             dc->pc = dc->npc;
5536             gen_op_next_insn();
5537             break;
5538         case JUMP_PC:
5539             /* we can do a static jump */
5540             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5541             dc->base.is_jmp = DISAS_NORETURN;
5542             break;
5543         default:
5544             g_assert_not_reached();
5545         }
5546     } else {
5547         dc->pc = dc->npc;
5548         dc->npc = dc->npc + 4;
5549     }
5550  jmp_insn:
5551     return;
5552  illegal_insn:
5553     gen_exception(dc, TT_ILL_INSN);
5554     return;
5555  unimp_flush:
5556     gen_exception(dc, TT_UNIMP_FLUSH);
5557     return;
5558 #if !defined(CONFIG_USER_ONLY)
5559  priv_insn:
5560     gen_exception(dc, TT_PRIV_INSN);
5561     return;
5562 #endif
5563  nfpu_insn:
5564     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5565     return;
5566 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5567  nfq_insn:
5568     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5569     return;
5570 #endif
5571 #ifndef TARGET_SPARC64
5572  ncp_insn:
5573     gen_exception(dc, TT_NCP_INSN);
5574     return;
5575 #endif
5576 }
5577 
5578 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5579 {
5580     DisasContext *dc = container_of(dcbase, DisasContext, base);
5581     CPUSPARCState *env = cs->env_ptr;
5582     int bound;
5583 
5584     dc->pc = dc->base.pc_first;
5585     dc->npc = (target_ulong)dc->base.tb->cs_base;
5586     dc->cc_op = CC_OP_DYNAMIC;
5587     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5588     dc->def = &env->def;
5589     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5590     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5591 #ifndef CONFIG_USER_ONLY
5592     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5593 #endif
5594 #ifdef TARGET_SPARC64
5595     dc->fprs_dirty = 0;
5596     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5597 #ifndef CONFIG_USER_ONLY
5598     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5599 #endif
5600 #endif
5601     /*
5602      * if we reach a page boundary, we stop generation so that the
5603      * PC of a TT_TFAULT exception is always in the right page
5604      */
5605     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5606     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5607 }
5608 
/* No per-TB setup is needed before translating the first instruction. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5612 
5613 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5614 {
5615     DisasContext *dc = container_of(dcbase, DisasContext, base);
5616     target_ulong npc = dc->npc;
5617 
5618     if (npc & 3) {
5619         switch (npc) {
5620         case JUMP_PC:
5621             assert(dc->jump_pc[1] == dc->pc + 4);
5622             npc = dc->jump_pc[0] | JUMP_PC;
5623             break;
5624         case DYNAMIC_PC:
5625         case DYNAMIC_PC_LOOKUP:
5626             npc = DYNAMIC_PC;
5627             break;
5628         default:
5629             g_assert_not_reached();
5630         }
5631     }
5632     tcg_gen_insn_start(dc->pc, npc);
5633 }
5634 
5635 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5636 {
5637     DisasContext *dc = container_of(dcbase, DisasContext, base);
5638     CPUSPARCState *env = cs->env_ptr;
5639     unsigned int insn;
5640 
5641     insn = translator_ldl(env, &dc->base, dc->pc);
5642     dc->base.pc_next += 4;
5643     disas_sparc_insn(dc, insn);
5644 
5645     if (dc->base.is_jmp == DISAS_NORETURN) {
5646         return;
5647     }
5648     if (dc->pc != dc->base.pc_next) {
5649         dc->base.is_jmp = DISAS_TOO_MANY;
5650     }
5651 }
5652 
5653 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5654 {
5655     DisasContext *dc = container_of(dcbase, DisasContext, base);
5656     bool may_lookup;
5657 
5658     switch (dc->base.is_jmp) {
5659     case DISAS_NEXT:
5660     case DISAS_TOO_MANY:
5661         if (((dc->pc | dc->npc) & 3) == 0) {
5662             /* static PC and NPC: we can use direct chaining */
5663             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5664             break;
5665         }
5666 
5667         if (dc->pc & 3) {
5668             switch (dc->pc) {
5669             case DYNAMIC_PC_LOOKUP:
5670                 may_lookup = true;
5671                 break;
5672             case DYNAMIC_PC:
5673                 may_lookup = false;
5674                 break;
5675             default:
5676                 g_assert_not_reached();
5677             }
5678         } else {
5679             tcg_gen_movi_tl(cpu_pc, dc->pc);
5680             may_lookup = true;
5681         }
5682 
5683         save_npc(dc);
5684         if (may_lookup) {
5685             tcg_gen_lookup_and_goto_ptr();
5686         } else {
5687             tcg_gen_exit_tb(NULL, 0);
5688         }
5689         break;
5690 
5691     case DISAS_NORETURN:
5692        break;
5693 
5694     case DISAS_EXIT:
5695         /* Exit TB */
5696         save_state(dc);
5697         tcg_gen_exit_tb(NULL, 0);
5698         break;
5699 
5700     default:
5701         g_assert_not_reached();
5702     }
5703 }
5704 
5705 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5706                                CPUState *cpu, FILE *logfile)
5707 {
5708     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5709     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5710 }
5711 
/* Callbacks invoked by the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5720 
5721 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5722                            target_ulong pc, void *host_pc)
5723 {
5724     DisasContext dc = {};
5725 
5726     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5727 }
5728 
5729 void sparc_tcg_init(void)
5730 {
5731     static const char gregnames[32][4] = {
5732         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5733         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5734         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5735         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5736     };
5737     static const char fregnames[32][4] = {
5738         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5739         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5740         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5741         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5742     };
5743 
5744     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5745 #ifdef TARGET_SPARC64
5746         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5747         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5748 #else
5749         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5750 #endif
5751         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5752         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5753     };
5754 
5755     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5756 #ifdef TARGET_SPARC64
5757         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5758         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5759         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5760         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5761           "hstick_cmpr" },
5762         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5763         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5764         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5765         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5766         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5767 #endif
5768         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5769         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5770         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5771         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5772         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5773         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5774         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5775         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5776 #ifndef CONFIG_USER_ONLY
5777         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5778 #endif
5779     };
5780 
5781     unsigned int i;
5782 
5783     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5784                                          offsetof(CPUSPARCState, regwptr),
5785                                          "regwptr");
5786 
5787     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5788         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5789     }
5790 
5791     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5792         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5793     }
5794 
5795     cpu_regs[0] = NULL;
5796     for (i = 1; i < 8; ++i) {
5797         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5798                                          offsetof(CPUSPARCState, gregs[i]),
5799                                          gregnames[i]);
5800     }
5801 
5802     for (i = 8; i < 32; ++i) {
5803         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5804                                          (i - 8) * sizeof(target_ulong),
5805                                          gregnames[i]);
5806     }
5807 
5808     for (i = 0; i < TARGET_DPREGS; i++) {
5809         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5810                                             offsetof(CPUSPARCState, fpr[i]),
5811                                             fregnames[i]);
5812     }
5813 }
5814 
5815 void sparc_restore_state_to_opc(CPUState *cs,
5816                                 const TranslationBlock *tb,
5817                                 const uint64_t *data)
5818 {
5819     SPARCCPU *cpu = SPARC_CPU(cs);
5820     CPUSPARCState *env = &cpu->env;
5821     target_ulong pc = data[0];
5822     target_ulong npc = data[1];
5823 
5824     env->pc = pc;
5825     if (npc == DYNAMIC_PC) {
5826         /* dynamic NPC: already stored */
5827     } else if (npc & JUMP_PC) {
5828         /* jump PC: use 'cond' and the jump targets of the translation */
5829         if (env->cond) {
5830             env->npc = npc & ~3;
5831         } else {
5832             env->npc = pc + 4;
5833         }
5834     } else {
5835         env->npc = npc;
5836     }
5837 }
5838