xref: /qemu/target/sparc/translate.c (revision dc03272d)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "trace-tcg.h"
33 #include "exec/translator.h"
34 #include "exec/log.h"
35 #include "asi.h"
36 
37 
38 #define DEBUG_DISAS
39 
40 #define DYNAMIC_PC  1 /* dynamic pc value */
41 #define JUMP_PC     2 /* dynamic pc value which takes only two values
42                          according to jump_pc[T2] */
43 
44 /* global register indexes */
45 static TCGv_ptr cpu_regwptr;
46 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
47 static TCGv_i32 cpu_cc_op;
48 static TCGv_i32 cpu_psr;
49 static TCGv cpu_fsr, cpu_pc, cpu_npc;
50 static TCGv cpu_regs[32];
51 static TCGv cpu_y;
52 #ifndef CONFIG_USER_ONLY
53 static TCGv cpu_tbr;
54 #endif
55 static TCGv cpu_cond;
56 #ifdef TARGET_SPARC64
57 static TCGv_i32 cpu_xcc, cpu_fprs;
58 static TCGv cpu_gsr;
59 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
60 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66 
67 #include "exec/gen-icount.h"
68 
/* Per-TB disassembly state for the SPARC front end.  */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;        /* memory access index for ld/st ops
                           (NOTE(review): presumably the MMU mode — confirm) */
    bool fpu_enabled;   /* FPU is usable in the current context */
    bool address_mask_32bit; /* generated addresses are masked to 32 bits */
#ifndef CONFIG_USER_ONLY
    bool supervisor;    /* executing at supervisor privilege */
#ifdef TARGET_SPARC64
    bool hypervisor;    /* executing at hypervisor privilege */
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;   /* CPU model definition */
    TCGv_i32 t32[3];    /* pool of i32 temporaries, see get_temp_i32() */
    TCGv ttl[5];        /* pool of target-long temporaries, see get_temp_tl() */
    int n_t32;          /* live entries in t32[] */
    int n_ttl;          /* live entries in ttl[] */
#ifdef TARGET_SPARC64
    int fprs_dirty;     /* FPRS dirty bits already ORed in during this TB */
    int asi;            /* ASI of the current instruction
                           (NOTE(review): set by the decoder — confirm) */
#endif
} DisasContext;
95 
/* A decomposed conditional test: COND applied to (c1, c2).  */
typedef struct {
    TCGCond cond;
    bool is_bool;       /* c1 already holds a 0/1 boolean result
                           (NOTE(review): inferred from name — confirm) */
    bool g1, g2;        /* c1/c2 are TCG globals; do not free (see free_compare) */
    TCGv c1, c2;
} DisasCompare;
102 
103 // This function uses non-native bit order
104 #define GET_FIELD(X, FROM, TO)                                  \
105     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
106 
107 // This function uses the order in the manuals, i.e. bit 0 is 2^0
108 #define GET_FIELD_SP(X, FROM, TO)               \
109     GET_FIELD(X, 31 - (TO), 31 - (FROM))
110 
111 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
112 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
113 
114 #ifdef TARGET_SPARC64
115 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
116 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
117 #else
118 #define DFPREG(r) (r & 0x1e)
119 #define QFPREG(r) (r & 0x1c)
120 #endif
121 
122 #define UA2005_HTRAP_MASK 0xff
123 #define V8_TRAP_MASK 0x7f
124 
/*
 * Sign-extend the low LEN bits of X to a full 32-bit signed value.
 * The left shift is done on an unsigned value: shifting a signed int
 * whose result would overflow is undefined behavior (C11 6.5.7).
 * The arithmetic right shift of a negative int is implementation-
 * defined but is the arithmetic shift on all compilers QEMU supports.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((uint32_t)x << len) >> len;
}
130 
131 #define IS_IMM (insn & (1<<13))
132 
/*
 * Allocate a fresh 32-bit TCG temporary and record it in dc->t32[] so
 * the translator can release it later.  Asserts if the fixed-size pool
 * is exhausted.
 */
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}
140 
/*
 * Allocate a fresh target-long TCG temporary and record it in
 * dc->ttl[] so the translator can release it later.  Asserts if the
 * fixed-size pool is exhausted.
 */
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
148 
/*
 * Mark FP register RD dirty in cpu_fprs (sparc64 only): bit 1 covers
 * regs 0..31, bit 2 covers regs 32+ (presumably FPRS.DL/FPRS.DU —
 * confirm against the architecture manual).  No-op on 32-bit SPARC.
 */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
161 
162 /* floating point registers moves */
/*
 * Return a 32-bit view of single-precision FP register SRC.  Each i64
 * in cpu_fpr[] backs two F registers: the odd-numbered register lives
 * in the low 32 bits, the even-numbered one in the high 32 bits.  On
 * 32-bit hosts the matching half of the i64 global is returned
 * directly; otherwise the half is extracted into a tracked temporary.
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    TCGv_i32 ret = get_temp_i32(dc);
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
#endif
}
181 
/*
 * Store V into single-precision FP register DST: deposit it into the
 * proper half of the backing i64 (odd register -> low 32 bits, even ->
 * high 32 bits) and mark the register bank dirty.
 */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = (TCGv_i64)v;
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
197 
/* Return a scratch i32 temporary to receive a single-precision result.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
202 
/*
 * Return the i64 global backing double-precision register SRC.
 * DFPREG() folds the 5-bit instruction encoding into the full register
 * number (on sparc64, bit 0 of the encoding becomes bit 5).
 */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
208 
/* Store V into double-precision register DST and mark the bank dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
215 
/* Return the i64 global to write a double-precision result for DST.
   The caller is expected to mark the bank dirty after writing.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
220 
221 static void gen_op_load_fpr_QT0(unsigned int src)
222 {
223     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
224                    offsetof(CPU_QuadU, ll.upper));
225     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
226                    offsetof(CPU_QuadU, ll.lower));
227 }
228 
229 static void gen_op_load_fpr_QT1(unsigned int src)
230 {
231     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
232                    offsetof(CPU_QuadU, ll.upper));
233     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
234                    offsetof(CPU_QuadU, ll.lower));
235 }
236 
237 static void gen_op_store_QT0_fpr(unsigned int dst)
238 {
239     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
240                    offsetof(CPU_QuadU, ll.upper));
241     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
242                    offsetof(CPU_QuadU, ll.lower));
243 }
244 
245 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
246                             TCGv_i64 v1, TCGv_i64 v2)
247 {
248     dst = QFPREG(dst);
249 
250     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
251     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
252     gen_update_fprs_dirty(dc, dst);
253 }
254 
255 #ifdef TARGET_SPARC64
256 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
257 {
258     src = QFPREG(src);
259     return cpu_fpr[src / 2];
260 }
261 
262 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
263 {
264     src = QFPREG(src);
265     return cpu_fpr[src / 2 + 1];
266 }
267 
268 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
269 {
270     rd = QFPREG(rd);
271     rs = QFPREG(rs);
272 
273     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
274     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
275     gen_update_fprs_dirty(dc, rd);
276 }
277 #endif
278 
279 /* moves */
280 #ifdef CONFIG_USER_ONLY
281 #define supervisor(dc) 0
282 #ifdef TARGET_SPARC64
283 #define hypervisor(dc) 0
284 #endif
285 #else
286 #ifdef TARGET_SPARC64
287 #define hypervisor(dc) (dc->hypervisor)
288 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
289 #else
290 #define supervisor(dc) (dc->supervisor)
291 #endif
292 #endif
293 
294 #ifdef TARGET_SPARC64
295 #ifndef TARGET_ABI32
296 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
297 #else
298 #define AM_CHECK(dc) (1)
299 #endif
300 #endif
301 
302 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
303 {
304 #ifdef TARGET_SPARC64
305     if (AM_CHECK(dc))
306         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
307 #endif
308 }
309 
/*
 * Return a TCGv holding the value of GPR REG.  %g0 (reg 0) has no
 * backing global; a zero-initialized temporary is returned instead so
 * reads always see zero.
 */
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, 0);
        return t;
    }
}
321 
/* Store V into GPR REG; writes to %g0 (reg 0) are silently discarded.  */
static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}
329 
/*
 * Return a destination TCGv for GPR REG.  For %g0 a scratch temporary
 * is returned, so the computed result is naturally discarded.
 */
static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        return get_temp_tl(dc);
    }
}
339 
/*
 * Decide whether a direct (chained) TB jump to (PC, NPC) is allowed.
 * Never while single-stepping; in system mode both targets must lie on
 * the same guest page as the current TB; in user mode always allowed.
 */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->base.singlestep_enabled || singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->base.tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->base.tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
354 
/*
 * End the TB jumping to (PC, NPC).  When chaining is permitted, emit a
 * goto_tb/exit_tb pair tagged with TB_NUM so the two exits of this TB
 * can be patched to jump directly; otherwise fall back to a plain
 * exit_tb(0) after updating pc/npc.
 */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->base.tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
371 
372 // XXX suboptimal
/* REG = the N (negative) flag bit of PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
378 
/* REG = the Z (zero) flag bit of PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
384 
/* REG = the V (overflow) flag bit of PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
390 
/* REG = the C (carry) flag bit of PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
396 
/*
 * DST = SRC1 + SRC2, leaving operands and result in cpu_cc_src,
 * cpu_cc_src2 and cpu_cc_dst so condition codes can be computed
 * lazily later.
 */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
404 
/*
 * Recover the 32-bit carry out of a previously generated add:
 * carry = (uint32_t)cc_dst < (uint32_t)cc_src.  Returns a new i32
 * temporary that the caller must free.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* On 64-bit targets, compare only the low 32 bits.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
430 
/*
 * Recover the 32-bit borrow out of a previously generated subtract:
 * carry = (uint32_t)cc_src < (uint32_t)cc_src2.  Returns a new i32
 * temporary that the caller must free.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* On 64-bit targets, compare only the low 32 bits.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
456 
/*
 * Generate DST = SRC1 + SRC2 + carry (ADDX/ADDXcc).  The carry value
 * is recovered as cheaply as the current dc->cc_op allows; when
 * UPDATE_CC is set, the operands and result are saved and cc_op is set
 * to CC_OP_ADDX for lazy flag computation.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        /* After a subtract, the carry flag is the borrow.  */
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
527 
/*
 * DST = SRC1 - SRC2, leaving operands and result in cpu_cc_src,
 * cpu_cc_src2 and cpu_cc_dst so condition codes can be computed
 * lazily later.
 */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
535 
/*
 * Generate DST = SRC1 - SRC2 - carry (SUBX/SUBXcc).  Mirror image of
 * gen_op_addx_int: the carry is recovered per the current dc->cc_op;
 * when UPDATE_CC is set, operands and result are saved and cc_op is
 * set to CC_OP_SUBX for lazy flag computation.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
606 
/*
 * Generate one SPARC multiply step (MULScc): if Y bit 0 is clear the
 * addend is forced to zero; Y is shifted right one bit with bit 0 of
 * the multiplicand inserted at bit 31; the partial sum is shifted
 * right one bit with (N ^ V) inserted at bit 31 and then added.
 * Operands and result are left in cpu_cc_* for the caller's flag
 * update.
 */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
648 
/*
 * 32x32 -> 64-bit multiply for UMUL/SMUL.  The truncated 32-bit
 * operands are extended per SIGN_EXT (signed vs unsigned); the product
 * goes to DST and its high 32 bits are written to the Y register.
 */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    /* Y receives the high half of the 64-bit product.  */
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
676 
/* UMUL: unsigned 32x32 -> 64 multiply; high half goes to Y.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
682 
/* SMUL: signed 32x32 -> 64 multiply; high half goes to Y.  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
688 
689 // 1
690 static inline void gen_op_eval_ba(TCGv dst)
691 {
692     tcg_gen_movi_tl(dst, 1);
693 }
694 
695 // Z
696 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
697 {
698     gen_mov_reg_Z(dst, src);
699 }
700 
701 // Z | (N ^ V)
702 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
703 {
704     TCGv t0 = tcg_temp_new();
705     gen_mov_reg_N(t0, src);
706     gen_mov_reg_V(dst, src);
707     tcg_gen_xor_tl(dst, dst, t0);
708     gen_mov_reg_Z(t0, src);
709     tcg_gen_or_tl(dst, dst, t0);
710     tcg_temp_free(t0);
711 }
712 
713 // N ^ V
714 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
715 {
716     TCGv t0 = tcg_temp_new();
717     gen_mov_reg_V(t0, src);
718     gen_mov_reg_N(dst, src);
719     tcg_gen_xor_tl(dst, dst, t0);
720     tcg_temp_free(t0);
721 }
722 
723 // C | Z
724 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
725 {
726     TCGv t0 = tcg_temp_new();
727     gen_mov_reg_Z(t0, src);
728     gen_mov_reg_C(dst, src);
729     tcg_gen_or_tl(dst, dst, t0);
730     tcg_temp_free(t0);
731 }
732 
733 // C
734 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
735 {
736     gen_mov_reg_C(dst, src);
737 }
738 
739 // V
740 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
741 {
742     gen_mov_reg_V(dst, src);
743 }
744 
745 // 0
746 static inline void gen_op_eval_bn(TCGv dst)
747 {
748     tcg_gen_movi_tl(dst, 0);
749 }
750 
751 // N
752 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
753 {
754     gen_mov_reg_N(dst, src);
755 }
756 
757 // !Z
758 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
759 {
760     gen_mov_reg_Z(dst, src);
761     tcg_gen_xori_tl(dst, dst, 0x1);
762 }
763 
764 // !(Z | (N ^ V))
765 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
766 {
767     gen_op_eval_ble(dst, src);
768     tcg_gen_xori_tl(dst, dst, 0x1);
769 }
770 
771 // !(N ^ V)
772 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
773 {
774     gen_op_eval_bl(dst, src);
775     tcg_gen_xori_tl(dst, dst, 0x1);
776 }
777 
778 // !(C | Z)
779 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
780 {
781     gen_op_eval_bleu(dst, src);
782     tcg_gen_xori_tl(dst, dst, 0x1);
783 }
784 
785 // !C
786 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
787 {
788     gen_mov_reg_C(dst, src);
789     tcg_gen_xori_tl(dst, dst, 0x1);
790 }
791 
792 // !N
793 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
794 {
795     gen_mov_reg_N(dst, src);
796     tcg_gen_xori_tl(dst, dst, 0x1);
797 }
798 
799 // !V
800 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
801 {
802     gen_mov_reg_V(dst, src);
803     tcg_gen_xori_tl(dst, dst, 0x1);
804 }
805 
806 /*
807   FPSR bit field FCC1 | FCC0:
808    0 =
809    1 <
810    2 >
811    3 unordered
812 */
813 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
814                                     unsigned int fcc_offset)
815 {
816     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
817     tcg_gen_andi_tl(reg, reg, 0x1);
818 }
819 
820 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
821                                     unsigned int fcc_offset)
822 {
823     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
824     tcg_gen_andi_tl(reg, reg, 0x1);
825 }
826 
827 // !0: FCC0 | FCC1
828 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
829                                     unsigned int fcc_offset)
830 {
831     TCGv t0 = tcg_temp_new();
832     gen_mov_reg_FCC0(dst, src, fcc_offset);
833     gen_mov_reg_FCC1(t0, src, fcc_offset);
834     tcg_gen_or_tl(dst, dst, t0);
835     tcg_temp_free(t0);
836 }
837 
838 // 1 or 2: FCC0 ^ FCC1
839 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
840                                     unsigned int fcc_offset)
841 {
842     TCGv t0 = tcg_temp_new();
843     gen_mov_reg_FCC0(dst, src, fcc_offset);
844     gen_mov_reg_FCC1(t0, src, fcc_offset);
845     tcg_gen_xor_tl(dst, dst, t0);
846     tcg_temp_free(t0);
847 }
848 
849 // 1 or 3: FCC0
850 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
851                                     unsigned int fcc_offset)
852 {
853     gen_mov_reg_FCC0(dst, src, fcc_offset);
854 }
855 
856 // 1: FCC0 & !FCC1
857 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
858                                     unsigned int fcc_offset)
859 {
860     TCGv t0 = tcg_temp_new();
861     gen_mov_reg_FCC0(dst, src, fcc_offset);
862     gen_mov_reg_FCC1(t0, src, fcc_offset);
863     tcg_gen_andc_tl(dst, dst, t0);
864     tcg_temp_free(t0);
865 }
866 
867 // 2 or 3: FCC1
868 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
869                                     unsigned int fcc_offset)
870 {
871     gen_mov_reg_FCC1(dst, src, fcc_offset);
872 }
873 
874 // 2: !FCC0 & FCC1
875 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
876                                     unsigned int fcc_offset)
877 {
878     TCGv t0 = tcg_temp_new();
879     gen_mov_reg_FCC0(dst, src, fcc_offset);
880     gen_mov_reg_FCC1(t0, src, fcc_offset);
881     tcg_gen_andc_tl(dst, t0, dst);
882     tcg_temp_free(t0);
883 }
884 
885 // 3: FCC0 & FCC1
886 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
887                                     unsigned int fcc_offset)
888 {
889     TCGv t0 = tcg_temp_new();
890     gen_mov_reg_FCC0(dst, src, fcc_offset);
891     gen_mov_reg_FCC1(t0, src, fcc_offset);
892     tcg_gen_and_tl(dst, dst, t0);
893     tcg_temp_free(t0);
894 }
895 
896 // 0: !(FCC0 | FCC1)
897 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
898                                     unsigned int fcc_offset)
899 {
900     TCGv t0 = tcg_temp_new();
901     gen_mov_reg_FCC0(dst, src, fcc_offset);
902     gen_mov_reg_FCC1(t0, src, fcc_offset);
903     tcg_gen_or_tl(dst, dst, t0);
904     tcg_gen_xori_tl(dst, dst, 0x1);
905     tcg_temp_free(t0);
906 }
907 
908 // 0 or 3: !(FCC0 ^ FCC1)
909 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
910                                     unsigned int fcc_offset)
911 {
912     TCGv t0 = tcg_temp_new();
913     gen_mov_reg_FCC0(dst, src, fcc_offset);
914     gen_mov_reg_FCC1(t0, src, fcc_offset);
915     tcg_gen_xor_tl(dst, dst, t0);
916     tcg_gen_xori_tl(dst, dst, 0x1);
917     tcg_temp_free(t0);
918 }
919 
920 // 0 or 2: !FCC0
921 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
922                                     unsigned int fcc_offset)
923 {
924     gen_mov_reg_FCC0(dst, src, fcc_offset);
925     tcg_gen_xori_tl(dst, dst, 0x1);
926 }
927 
928 // !1: !(FCC0 & !FCC1)
929 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
930                                     unsigned int fcc_offset)
931 {
932     TCGv t0 = tcg_temp_new();
933     gen_mov_reg_FCC0(dst, src, fcc_offset);
934     gen_mov_reg_FCC1(t0, src, fcc_offset);
935     tcg_gen_andc_tl(dst, dst, t0);
936     tcg_gen_xori_tl(dst, dst, 0x1);
937     tcg_temp_free(t0);
938 }
939 
940 // 0 or 1: !FCC1
941 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
942                                     unsigned int fcc_offset)
943 {
944     gen_mov_reg_FCC1(dst, src, fcc_offset);
945     tcg_gen_xori_tl(dst, dst, 0x1);
946 }
947 
948 // !2: !(!FCC0 & FCC1)
949 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
950                                     unsigned int fcc_offset)
951 {
952     TCGv t0 = tcg_temp_new();
953     gen_mov_reg_FCC0(dst, src, fcc_offset);
954     gen_mov_reg_FCC1(t0, src, fcc_offset);
955     tcg_gen_andc_tl(dst, t0, dst);
956     tcg_gen_xori_tl(dst, dst, 0x1);
957     tcg_temp_free(t0);
958 }
959 
960 // !3: !(FCC0 & FCC1)
961 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
962                                     unsigned int fcc_offset)
963 {
964     TCGv t0 = tcg_temp_new();
965     gen_mov_reg_FCC0(dst, src, fcc_offset);
966     gen_mov_reg_FCC1(t0, src, fcc_offset);
967     tcg_gen_and_tl(dst, dst, t0);
968     tcg_gen_xori_tl(dst, dst, 0x1);
969     tcg_temp_free(t0);
970 }
971 
/*
 * Two-way conditional TB exit: continue at PC1 when R_COND is
 * non-zero, otherwise at PC2.  Each path uses a chained goto_tb when
 * allowed.
 */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
984 
/*
 * Annulling conditional branch: when cpu_cond is non-zero, execute the
 * delay slot at npc and jump to PC1; otherwise the delay slot is
 * annulled and execution resumes at npc + 4.  Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
999 
/*
 * Non-annulling conditional branch: the delay slot at npc always
 * executes next, then control goes to PC1 if the condition held, else
 * to npc + 4.  When npc is known statically the decision is deferred
 * through the JUMP_PC state (resolved later by gen_generic_branch);
 * otherwise it is resolved immediately with a movcond.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        /* npc = (cond != 0) ? pc1 : npc + 4 */
        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1024 
/*
 * Materialize a deferred JUMP_PC decision:
 * npc = (cond != 0) ? jump_pc[0] : jump_pc[1].
 */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1037 
1038 /* call this function before using the condition register as it may
1039    have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending branch so cpu_cond may be clobbered.  */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1047 
/*
 * Flush the symbolic npc into the cpu_npc global: resolve a pending
 * JUMP_PC, or store the static value; nothing to do if npc is already
 * dynamic.
 */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1057 
/*
 * Force lazily-tracked condition codes into the PSR via the helper,
 * recording that flags are now up to date (CC_OP_FLAGS).
 */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1065 
/* Synchronize cpu_pc/cpu_npc with the translator's symbolic state
   (used before operations that may raise, see gen_exception).  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1071 
/*
 * Raise exception WHICH at the current pc/npc: save state, call the
 * raise helper, and terminate translation of this path.
 */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->base.is_jmp = DISAS_NORETURN;
}
1082 
/* Emit a run-time alignment check of ADDR against MASK via the
   check_align helper.  */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1089 
1090 static inline void gen_mov_pc_npc(DisasContext *dc)
1091 {
1092     if (dc->npc == JUMP_PC) {
1093         gen_generic_branch(dc);
1094         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1095         dc->pc = DYNAMIC_PC;
1096     } else if (dc->npc == DYNAMIC_PC) {
1097         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1098         dc->pc = DYNAMIC_PC;
1099     } else {
1100         dc->pc = dc->npc;
1101     }
1102 }
1103 
/* Advance to the next instruction at runtime: pc = npc, npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1109 
/* Release the temporaries held by CMP.  Operands flagged g1/g2 are
   long-lived TCG globals (e.g. cpu_cc_src) and must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1119 
1120 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1121                         DisasContext *dc)
1122 {
1123     static int subcc_cond[16] = {
1124         TCG_COND_NEVER,
1125         TCG_COND_EQ,
1126         TCG_COND_LE,
1127         TCG_COND_LT,
1128         TCG_COND_LEU,
1129         TCG_COND_LTU,
1130         -1, /* neg */
1131         -1, /* overflow */
1132         TCG_COND_ALWAYS,
1133         TCG_COND_NE,
1134         TCG_COND_GT,
1135         TCG_COND_GE,
1136         TCG_COND_GTU,
1137         TCG_COND_GEU,
1138         -1, /* pos */
1139         -1, /* no overflow */
1140     };
1141 
1142     static int logic_cond[16] = {
1143         TCG_COND_NEVER,
1144         TCG_COND_EQ,     /* eq:  Z */
1145         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1146         TCG_COND_LT,     /* lt:  N ^ V -> N */
1147         TCG_COND_EQ,     /* leu: C | Z -> Z */
1148         TCG_COND_NEVER,  /* ltu: C -> 0 */
1149         TCG_COND_LT,     /* neg: N */
1150         TCG_COND_NEVER,  /* vs:  V -> 0 */
1151         TCG_COND_ALWAYS,
1152         TCG_COND_NE,     /* ne:  !Z */
1153         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1154         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1155         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1156         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1157         TCG_COND_GE,     /* pos: !N */
1158         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1159     };
1160 
1161     TCGv_i32 r_src;
1162     TCGv r_dst;
1163 
1164 #ifdef TARGET_SPARC64
1165     if (xcc) {
1166         r_src = cpu_xcc;
1167     } else {
1168         r_src = cpu_psr;
1169     }
1170 #else
1171     r_src = cpu_psr;
1172 #endif
1173 
1174     switch (dc->cc_op) {
1175     case CC_OP_LOGIC:
1176         cmp->cond = logic_cond[cond];
1177     do_compare_dst_0:
1178         cmp->is_bool = false;
1179         cmp->g2 = false;
1180         cmp->c2 = tcg_const_tl(0);
1181 #ifdef TARGET_SPARC64
1182         if (!xcc) {
1183             cmp->g1 = false;
1184             cmp->c1 = tcg_temp_new();
1185             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1186             break;
1187         }
1188 #endif
1189         cmp->g1 = true;
1190         cmp->c1 = cpu_cc_dst;
1191         break;
1192 
1193     case CC_OP_SUB:
1194         switch (cond) {
1195         case 6:  /* neg */
1196         case 14: /* pos */
1197             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1198             goto do_compare_dst_0;
1199 
1200         case 7: /* overflow */
1201         case 15: /* !overflow */
1202             goto do_dynamic;
1203 
1204         default:
1205             cmp->cond = subcc_cond[cond];
1206             cmp->is_bool = false;
1207 #ifdef TARGET_SPARC64
1208             if (!xcc) {
1209                 /* Note that sign-extension works for unsigned compares as
1210                    long as both operands are sign-extended.  */
1211                 cmp->g1 = cmp->g2 = false;
1212                 cmp->c1 = tcg_temp_new();
1213                 cmp->c2 = tcg_temp_new();
1214                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1215                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1216                 break;
1217             }
1218 #endif
1219             cmp->g1 = cmp->g2 = true;
1220             cmp->c1 = cpu_cc_src;
1221             cmp->c2 = cpu_cc_src2;
1222             break;
1223         }
1224         break;
1225 
1226     default:
1227     do_dynamic:
1228         gen_helper_compute_psr(cpu_env);
1229         dc->cc_op = CC_OP_FLAGS;
1230         /* FALLTHRU */
1231 
1232     case CC_OP_FLAGS:
1233         /* We're going to generate a boolean result.  */
1234         cmp->cond = TCG_COND_NE;
1235         cmp->is_bool = true;
1236         cmp->g1 = cmp->g2 = false;
1237         cmp->c1 = r_dst = tcg_temp_new();
1238         cmp->c2 = tcg_const_tl(0);
1239 
1240         switch (cond) {
1241         case 0x0:
1242             gen_op_eval_bn(r_dst);
1243             break;
1244         case 0x1:
1245             gen_op_eval_be(r_dst, r_src);
1246             break;
1247         case 0x2:
1248             gen_op_eval_ble(r_dst, r_src);
1249             break;
1250         case 0x3:
1251             gen_op_eval_bl(r_dst, r_src);
1252             break;
1253         case 0x4:
1254             gen_op_eval_bleu(r_dst, r_src);
1255             break;
1256         case 0x5:
1257             gen_op_eval_bcs(r_dst, r_src);
1258             break;
1259         case 0x6:
1260             gen_op_eval_bneg(r_dst, r_src);
1261             break;
1262         case 0x7:
1263             gen_op_eval_bvs(r_dst, r_src);
1264             break;
1265         case 0x8:
1266             gen_op_eval_ba(r_dst);
1267             break;
1268         case 0x9:
1269             gen_op_eval_bne(r_dst, r_src);
1270             break;
1271         case 0xa:
1272             gen_op_eval_bg(r_dst, r_src);
1273             break;
1274         case 0xb:
1275             gen_op_eval_bge(r_dst, r_src);
1276             break;
1277         case 0xc:
1278             gen_op_eval_bgu(r_dst, r_src);
1279             break;
1280         case 0xd:
1281             gen_op_eval_bcc(r_dst, r_src);
1282             break;
1283         case 0xe:
1284             gen_op_eval_bpos(r_dst, r_src);
1285             break;
1286         case 0xf:
1287             gen_op_eval_bvc(r_dst, r_src);
1288             break;
1289         }
1290         break;
1291     }
1292 }
1293 
/* Build a DisasCompare for FP branch condition COND on fcc field CC.
   Always produces a boolean temporary evaluated from %fsr.  OFFSET is
   the extra bit displacement the gen_op_eval_fb* helpers apply to
   reach fcc1..fcc3 within %fsr (0 selects fcc0) — values follow the
   v9 FSR layout.  Caller frees via free_compare().  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Dispatch on the 4-bit FP condition field.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1373 
1374 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1375                      DisasContext *dc)
1376 {
1377     DisasCompare cmp;
1378     gen_compare(&cmp, cc, cond, dc);
1379 
1380     /* The interface is to return a boolean in r_dst.  */
1381     if (cmp.is_bool) {
1382         tcg_gen_mov_tl(r_dst, cmp.c1);
1383     } else {
1384         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1385     }
1386 
1387     free_compare(&cmp);
1388 }
1389 
1390 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1391 {
1392     DisasCompare cmp;
1393     gen_fcompare(&cmp, cc, cond);
1394 
1395     /* The interface is to return a boolean in r_dst.  */
1396     if (cmp.is_bool) {
1397         tcg_gen_mov_tl(r_dst, cmp.c1);
1398     } else {
1399         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1400     }
1401 
1402     free_compare(&cmp);
1403 }
1404 
1405 #ifdef TARGET_SPARC64
/* Register-based conditions (v9 BPr and friends), indexed by the
   3-bit rcond field.  Each entry stores the INVERSE of the
   architectural test; callers undo this with tcg_invert_cond().
   -1 marks reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1417 
1418 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1419 {
1420     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1421     cmp->is_bool = false;
1422     cmp->g1 = true;
1423     cmp->g2 = false;
1424     cmp->c1 = r_src;
1425     cmp->c2 = tcg_const_tl(0);
1426 }
1427 
1428 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1429 {
1430     DisasCompare cmp;
1431     gen_compare_reg(&cmp, cond, r_src);
1432 
1433     /* The interface is to return a boolean in r_dst.  */
1434     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1435 
1436     free_compare(&cmp);
1437 }
1438 #endif
1439 
/* Translate an integer conditional branch (Bicc/BPcc).  OFFSET is the
   sign-extended displacement, CC selects icc/xcc.  The annul bit (a)
   controls whether the delay-slot instruction is skipped for the
   untaken/unconditional cases.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (PSTATE.AM) set, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay-slot instruction entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled BA: jump directly, delay slot skipped.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->npc may have been DYNAMIC_PC; keep cpu_pc in sync
               with the runtime npc as well.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: materialize the condition, then branch with or
           without annulment of the delay slot.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1479 
/* Translate an FP conditional branch (FBfcc/FBPfcc).  Mirrors
   do_branch() but evaluates the condition from the selected fcc
   field instead of the integer flags.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (PSTATE.AM) set, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay-slot instruction entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled FBA: jump directly, delay slot skipped.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->npc may have been DYNAMIC_PC; keep cpu_pc in sync
               with the runtime npc as well.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: materialize the FP condition, then branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1519 
1520 #ifdef TARGET_SPARC64
1521 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1522                           TCGv r_reg)
1523 {
1524     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1525     target_ulong target = dc->pc + offset;
1526 
1527     if (unlikely(AM_CHECK(dc))) {
1528         target &= 0xffffffffULL;
1529     }
1530     flush_cond(dc);
1531     gen_cond_reg(cpu_cond, cond, r_reg);
1532     if (a) {
1533         gen_branch_a(dc, target);
1534     } else {
1535         gen_branch_n(dc, target);
1536     }
1537 }
1538 
/* FP compare emitters (SPARC v9): each fcc field has its own helper,
   so dispatch on FCCNO.  All helpers return the updated %fsr.  */

/* Single-precision compare into fcc[fccno].  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* Double-precision compare into fcc[fccno].  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* Quad-precision compare; operands come from the QT0/QT1 slots in
   env rather than explicit arguments.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}

/* FCMPE* variants below: compare-and-signal forms (see the helper
   implementations for the exact unordered-operand behavior).  */

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1646 
1647 #else
1648 
/* Pre-v9 CPUs have a single fcc field, so FCCNO is ignored and every
   compare goes through the fcc0 helper.  */

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

/* Quad compare: operands come from the QT0/QT1 slots in env.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1678 #endif
1679 
/* Raise an FP exception with the given FTT bits: clear the old FTT
   field in %fsr, set FSR_FLAGS, then trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1686 
/* If the FPU is disabled (system emulation only), raise the
   fp-disabled trap and return 1 so the caller aborts translation of
   the insn; otherwise return 0.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1697 
/* Clear the FTT and current-exception (cexc) fields of %fsr before an
   FP operation.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1702 
/* Single-precision codegen wrappers.  The "gen_fop_*" forms call an
   exception-capable helper and then fold accrued IEEE exceptions into
   %fsr; the "gen_ne_fop_*" forms are for ops that cannot raise.  */

/* dst = gen(env, src) for a unary float op (may raise).  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst = gen(src) for a non-excepting unary float op.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst = gen(env, src1, src2) for a binary float op (may raise).  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1744 
#ifdef TARGET_SPARC64
/* dst = gen(src1, src2), non-excepting binary float op (VIS).  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

/* dst = gen(env, src) for a unary double op (may raise).  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* dst = gen(src), non-excepting unary double op (VIS).  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
1789 
/* dst = gen(env, src1, src2) for a binary double op (may raise).  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* dst = gen(src1, src2), non-excepting binary double op (VIS).  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* dst = gen(gsr, src1, src2): binary double op that also reads the
   GSR (e.g. faligndata).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* dst = gen(old_dst, src1, src2): ternary op that also reads the
   previous contents of rd.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
1849 
/* Quad-precision ops pass operands through the QT0/QT1 slots in env
   rather than TCG values: load QT1 (and QT0 for binary ops), call the
   helper, then store QT0 back to the destination quad register.  */

/* Unary quad op (may raise).  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

#ifdef TARGET_SPARC64
/* Non-excepting unary quad op.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
#endif

/* Binary quad op (may raise): operands in QT0/QT1.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1887 
/* double dst = gen(env, float src1, float src2), e.g. fsmuld
   (may raise).  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* quad QT0 = gen(env, double src1, double src2), e.g. fdmulq
   (may raise).  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1918 
#ifdef TARGET_SPARC64
/* double dst = gen(env, float src), with IEEE exception check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* double dst = gen(env, float src), without the IEEE exception
   check (the helper still receives env).  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1949 
/* float dst = gen(env, double src), with IEEE exception check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* float dst = gen(env) with the quad operand in QT1.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* double dst = gen(env) with the quad operand in QT1.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1992 
/* quad QT0 = gen(env, float src), no IEEE exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* quad QT0 = gen(env, double src), no IEEE exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2018 
/* SWAP: atomically exchange SRC with the memory at ADDR, returning
   the old memory value in DST.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2025 
2026 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
2027 {
2028     TCGv m1 = tcg_const_tl(0xff);
2029     gen_address_mask(dc, addr);
2030     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
2031     tcg_temp_free(m1);
2032 }
2033 
2034 /* asi moves */
2035 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Strategy for implementing an ASI access, as classified by
   get_asi().  */
typedef enum {
    GET_ASI_HELPER,  /* generic: go through the ld/st_asi helpers */
    GET_ASI_EXCP,    /* an exception has already been generated */
    GET_ASI_DIRECT,  /* plain load/store with a chosen mmu_idx */
    GET_ASI_DTWINX,  /* 128-bit twin loads/stores */
    GET_ASI_BLOCK,   /* 64-byte block transfer */
    GET_ASI_SHORT,   /* short (8/16-bit) FP load/store */
    GET_ASI_BCOPY,   /* LEON/sun4m block copy */
    GET_ASI_BFILL,   /* LEON/sun4m block fill */
} ASIType;
2046 
/* Decoded ASI access: how to implement it, the raw asi number, the
   softmmu index to use, and the (endian-adjusted) memory op.  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    TCGMemOp memop;
} DisasASI;
2053 
2054 static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2055 {
2056     int asi = GET_FIELD(insn, 19, 26);
2057     ASIType type = GET_ASI_HELPER;
2058     int mem_idx = dc->mem_idx;
2059 
2060 #ifndef TARGET_SPARC64
2061     /* Before v9, all asis are immediate and privileged.  */
2062     if (IS_IMM) {
2063         gen_exception(dc, TT_ILL_INSN);
2064         type = GET_ASI_EXCP;
2065     } else if (supervisor(dc)
2066                /* Note that LEON accepts ASI_USERDATA in user mode, for
2067                   use with CASA.  Also note that previous versions of
2068                   QEMU allowed (and old versions of gcc emitted) ASI_P
2069                   for LEON, which is incorrect.  */
2070                || (asi == ASI_USERDATA
2071                    && (dc->def->features & CPU_FEATURE_CASA))) {
2072         switch (asi) {
2073         case ASI_USERDATA:   /* User data access */
2074             mem_idx = MMU_USER_IDX;
2075             type = GET_ASI_DIRECT;
2076             break;
2077         case ASI_KERNELDATA: /* Supervisor data access */
2078             mem_idx = MMU_KERNEL_IDX;
2079             type = GET_ASI_DIRECT;
2080             break;
2081         case ASI_M_BYPASS:    /* MMU passthrough */
2082         case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2083             mem_idx = MMU_PHYS_IDX;
2084             type = GET_ASI_DIRECT;
2085             break;
2086         case ASI_M_BCOPY: /* Block copy, sta access */
2087             mem_idx = MMU_KERNEL_IDX;
2088             type = GET_ASI_BCOPY;
2089             break;
2090         case ASI_M_BFILL: /* Block fill, stda access */
2091             mem_idx = MMU_KERNEL_IDX;
2092             type = GET_ASI_BFILL;
2093             break;
2094         }
2095 
2096         /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
2097          * permissions check in get_physical_address(..).
2098          */
2099         mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
2100     } else {
2101         gen_exception(dc, TT_PRIV_INSN);
2102         type = GET_ASI_EXCP;
2103     }
2104 #else
2105     if (IS_IMM) {
2106         asi = dc->asi;
2107     }
2108     /* With v9, all asis below 0x80 are privileged.  */
2109     /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2110        down that bit into DisasContext.  For the moment that's ok,
2111        since the direct implementations below doesn't have any ASIs
2112        in the restricted [0x30, 0x7f] range, and the check will be
2113        done properly in the helper.  */
2114     if (!supervisor(dc) && asi < 0x80) {
2115         gen_exception(dc, TT_PRIV_ACT);
2116         type = GET_ASI_EXCP;
2117     } else {
2118         switch (asi) {
2119         case ASI_REAL:      /* Bypass */
2120         case ASI_REAL_IO:   /* Bypass, non-cacheable */
2121         case ASI_REAL_L:    /* Bypass LE */
2122         case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2123         case ASI_TWINX_REAL:   /* Real address, twinx */
2124         case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2125         case ASI_QUAD_LDD_PHYS:
2126         case ASI_QUAD_LDD_PHYS_L:
2127             mem_idx = MMU_PHYS_IDX;
2128             break;
2129         case ASI_N:  /* Nucleus */
2130         case ASI_NL: /* Nucleus LE */
2131         case ASI_TWINX_N:
2132         case ASI_TWINX_NL:
2133         case ASI_NUCLEUS_QUAD_LDD:
2134         case ASI_NUCLEUS_QUAD_LDD_L:
2135             if (hypervisor(dc)) {
2136                 mem_idx = MMU_PHYS_IDX;
2137             } else {
2138                 mem_idx = MMU_NUCLEUS_IDX;
2139             }
2140             break;
2141         case ASI_AIUP:  /* As if user primary */
2142         case ASI_AIUPL: /* As if user primary LE */
2143         case ASI_TWINX_AIUP:
2144         case ASI_TWINX_AIUP_L:
2145         case ASI_BLK_AIUP_4V:
2146         case ASI_BLK_AIUP_L_4V:
2147         case ASI_BLK_AIUP:
2148         case ASI_BLK_AIUPL:
2149             mem_idx = MMU_USER_IDX;
2150             break;
2151         case ASI_AIUS:  /* As if user secondary */
2152         case ASI_AIUSL: /* As if user secondary LE */
2153         case ASI_TWINX_AIUS:
2154         case ASI_TWINX_AIUS_L:
2155         case ASI_BLK_AIUS_4V:
2156         case ASI_BLK_AIUS_L_4V:
2157         case ASI_BLK_AIUS:
2158         case ASI_BLK_AIUSL:
2159             mem_idx = MMU_USER_SECONDARY_IDX;
2160             break;
2161         case ASI_S:  /* Secondary */
2162         case ASI_SL: /* Secondary LE */
2163         case ASI_TWINX_S:
2164         case ASI_TWINX_SL:
2165         case ASI_BLK_COMMIT_S:
2166         case ASI_BLK_S:
2167         case ASI_BLK_SL:
2168         case ASI_FL8_S:
2169         case ASI_FL8_SL:
2170         case ASI_FL16_S:
2171         case ASI_FL16_SL:
2172             if (mem_idx == MMU_USER_IDX) {
2173                 mem_idx = MMU_USER_SECONDARY_IDX;
2174             } else if (mem_idx == MMU_KERNEL_IDX) {
2175                 mem_idx = MMU_KERNEL_SECONDARY_IDX;
2176             }
2177             break;
2178         case ASI_P:  /* Primary */
2179         case ASI_PL: /* Primary LE */
2180         case ASI_TWINX_P:
2181         case ASI_TWINX_PL:
2182         case ASI_BLK_COMMIT_P:
2183         case ASI_BLK_P:
2184         case ASI_BLK_PL:
2185         case ASI_FL8_P:
2186         case ASI_FL8_PL:
2187         case ASI_FL16_P:
2188         case ASI_FL16_PL:
2189             break;
2190         }
2191         switch (asi) {
2192         case ASI_REAL:
2193         case ASI_REAL_IO:
2194         case ASI_REAL_L:
2195         case ASI_REAL_IO_L:
2196         case ASI_N:
2197         case ASI_NL:
2198         case ASI_AIUP:
2199         case ASI_AIUPL:
2200         case ASI_AIUS:
2201         case ASI_AIUSL:
2202         case ASI_S:
2203         case ASI_SL:
2204         case ASI_P:
2205         case ASI_PL:
2206             type = GET_ASI_DIRECT;
2207             break;
2208         case ASI_TWINX_REAL:
2209         case ASI_TWINX_REAL_L:
2210         case ASI_TWINX_N:
2211         case ASI_TWINX_NL:
2212         case ASI_TWINX_AIUP:
2213         case ASI_TWINX_AIUP_L:
2214         case ASI_TWINX_AIUS:
2215         case ASI_TWINX_AIUS_L:
2216         case ASI_TWINX_P:
2217         case ASI_TWINX_PL:
2218         case ASI_TWINX_S:
2219         case ASI_TWINX_SL:
2220         case ASI_QUAD_LDD_PHYS:
2221         case ASI_QUAD_LDD_PHYS_L:
2222         case ASI_NUCLEUS_QUAD_LDD:
2223         case ASI_NUCLEUS_QUAD_LDD_L:
2224             type = GET_ASI_DTWINX;
2225             break;
2226         case ASI_BLK_COMMIT_P:
2227         case ASI_BLK_COMMIT_S:
2228         case ASI_BLK_AIUP_4V:
2229         case ASI_BLK_AIUP_L_4V:
2230         case ASI_BLK_AIUP:
2231         case ASI_BLK_AIUPL:
2232         case ASI_BLK_AIUS_4V:
2233         case ASI_BLK_AIUS_L_4V:
2234         case ASI_BLK_AIUS:
2235         case ASI_BLK_AIUSL:
2236         case ASI_BLK_S:
2237         case ASI_BLK_SL:
2238         case ASI_BLK_P:
2239         case ASI_BLK_PL:
2240             type = GET_ASI_BLOCK;
2241             break;
2242         case ASI_FL8_S:
2243         case ASI_FL8_SL:
2244         case ASI_FL8_P:
2245         case ASI_FL8_PL:
2246             memop = MO_UB;
2247             type = GET_ASI_SHORT;
2248             break;
2249         case ASI_FL16_S:
2250         case ASI_FL16_SL:
2251         case ASI_FL16_P:
2252         case ASI_FL16_PL:
2253             memop = MO_TEUW;
2254             type = GET_ASI_SHORT;
2255             break;
2256         }
2257         /* The little-endian asis all have bit 3 set.  */
2258         if (asi & 8) {
2259             memop ^= MO_BSWAP;
2260         }
2261     }
2262 #endif
2263 
2264     return (DisasASI){ type, asi, mem_idx, memop };
2265 }
2266 
/* Emit TCG for an alternate-space integer load (LDA and friends):
 * load from ADDR in the address space selected by the insn's ASI into DST.
 * Known-direct ASIs become inline qemu loads; anything else falls back to
 * the out-of-line ld_asi helper.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* ASI maps to an ordinary MMU index; emit an inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            /* NOTE(review): the caller's raw memop is passed here, not
               da.memop — presumably the helper derives byte order from
               the ASI itself; confirm against helper_ld_asi.  */
            TCGv_i32 r_mop = tcg_const_i32(memop);

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the 32-bit TL.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2304 
/* Emit TCG for an alternate-space integer store (STA and friends):
 * store SRC to ADDR in the address space selected by the insn's ASI.
 * Direct ASIs become inline qemu stores; BCOPY (sparc32 sysemu) is
 * expanded inline; everything else goes through the st_asi helper.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            /* Only the size bits matter to the helper.  */
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the 32-bit TL.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2388 
2389 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2390                          TCGv addr, int insn)
2391 {
2392     DisasASI da = get_asi(dc, insn, MO_TEUL);
2393 
2394     switch (da.type) {
2395     case GET_ASI_EXCP:
2396         break;
2397     case GET_ASI_DIRECT:
2398         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2399         break;
2400     default:
2401         /* ??? Should be DAE_invalid_asi.  */
2402         gen_exception(dc, TT_DATA_ACCESS);
2403         break;
2404     }
2405 }
2406 
2407 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2408                         int insn, int rd)
2409 {
2410     DisasASI da = get_asi(dc, insn, MO_TEUL);
2411     TCGv oldv;
2412 
2413     switch (da.type) {
2414     case GET_ASI_EXCP:
2415         return;
2416     case GET_ASI_DIRECT:
2417         oldv = tcg_temp_new();
2418         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2419                                   da.mem_idx, da.memop);
2420         gen_store_gpr(dc, rd, oldv);
2421         tcg_temp_free(oldv);
2422         break;
2423     default:
2424         /* ??? Should be DAE_invalid_asi.  */
2425         gen_exception(dc, TT_DATA_ACCESS);
2426         break;
2427     }
2428 }
2429 
/* Emit TCG for LDSTUBA: atomically load the byte at ADDR into DST and
 * store 0xff back.  Direct ASIs use the inline atomic expansion; other
 * ASIs are emulated non-atomically via the ld/st helpers (serialized by
 * exiting parallel execution).
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* Can't do the helper-pair atomically under MTTCG; restart
               this instruction with all other vCPUs stopped.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* The helpers may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            /* LDSTUB always stores all-ones over the loaded byte.  */
            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2469 #endif
2470 
2471 #ifdef TARGET_SPARC64
/* Emit TCG for LDFA/LDDFA/LDQFA: load SIZE bytes (4, 8 or 16) from ADDR
 * in the insn's ASI space into FP register rd.  Handles the block-load
 * (64 bytes into 8 aligned double regs) and short-FP ASI variants, and
 * falls back to the integer ld_asi helper for e.g. the NO_FAULT ASIs.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            /* Doubles require only 4-byte alignment on sparc.  */
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the high half into a temp first so a fault on the
               second access leaves cpu_fpr[rd/2] unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As above: buffer the first half against a fault on the
                   second helper call.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2585 
/* Emit TCG for STFA/STDFA/STQFA: store SIZE bytes (4, 8 or 16) from FP
 * register rd to ADDR in the insn's ASI space.  Handles the block-store
 * and short-FP ASI variants; unsupported ASIs raise an illegal insn.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            /* Doubles require only 4-byte alignment on sparc.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2668 
/* Emit TCG for sparc64 LDDA: load a 64-bit (or twin 128-bit) quantity
 * from ADDR into the register pair rd/rd+1.  TWINX ASIs load two full
 * 64-bit words; other ASIs load one 64-bit word that is split into two
 * 32-bit halves.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        return;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit words; only the first checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2734 
/* Emit TCG for sparc64 STDA: store the register pair HI (rd) and rd+1
 * to ADDR.  TWINX ASIs store two full 64-bit words; other ASIs pack the
 * two 32-bit halves into one 64-bit store.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit words; only the first checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2794 
2795 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2796                          int insn, int rd)
2797 {
2798     DisasASI da = get_asi(dc, insn, MO_TEQ);
2799     TCGv oldv;
2800 
2801     switch (da.type) {
2802     case GET_ASI_EXCP:
2803         return;
2804     case GET_ASI_DIRECT:
2805         oldv = tcg_temp_new();
2806         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2807                                   da.mem_idx, da.memop);
2808         gen_store_gpr(dc, rd, oldv);
2809         tcg_temp_free(oldv);
2810         break;
2811     default:
2812         /* ??? Should be DAE_invalid_asi.  */
2813         gen_exception(dc, TT_DATA_ACCESS);
2814         break;
2815     }
2816 }
2817 
2818 #elif !defined(CONFIG_USER_ONLY)
/* Emit TCG for sparc32 LDDA: load one 64-bit word from ADDR and split
 * it into the 32-bit register pair rd (high half) / rd+1 (low half).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi raised the exception; release the temp and bail.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the 64-bit result into the even/odd register pair.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2856 
/* Emit TCG for sparc32 STDA: concatenate the 32-bit register pair
 * HI (rd) / rd+1 into one 64-bit value and store it to ADDR.  Also
 * implements the ASI_M_BFILL block-fill, which replicates the value
 * over 32 bytes.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            /* The helper may longjmp out on a fault; sync pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2909 #endif
2910 
2911 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2912 {
2913     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2914     return gen_load_gpr(dc, rs1);
2915 }
2916 
2917 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2918 {
2919     if (IS_IMM) { /* immediate */
2920         target_long simm = GET_FIELDs(insn, 19, 31);
2921         TCGv t = get_temp_tl(dc);
2922         tcg_gen_movi_tl(t, simm);
2923         return t;
2924     } else {      /* register */
2925         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2926         return gen_load_gpr(dc, rs2);
2927     }
2928 }
2929 
2930 #ifdef TARGET_SPARC64
/* Emit TCG for FMOVS-on-condition: copy single FP register rs to rd
 * iff the comparison CMP holds, else leave rd unchanged.
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 is already 0/1; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison to a 0/1 value, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? rs : rd  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2959 
2960 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2961 {
2962     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2963     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2964                         gen_load_fpr_D(dc, rs),
2965                         gen_load_fpr_D(dc, rd));
2966     gen_store_fpr_D(dc, rd, dst);
2967 }
2968 
2969 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2970 {
2971     int qd = QFPREG(rd);
2972     int qs = QFPREG(rs);
2973 
2974     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2975                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2976     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2977                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2978 
2979     gen_update_fprs_dirty(dc, qd);
2980 }
2981 
2982 #ifndef CONFIG_USER_ONLY
2983 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
2984 {
2985     TCGv_i32 r_tl = tcg_temp_new_i32();
2986 
2987     /* load env->tl into r_tl */
2988     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2989 
2990     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2991     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2992 
2993     /* calculate offset to current trap state from env->ts, reuse r_tl */
2994     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2995     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2996 
2997     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2998     {
2999         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
3000         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
3001         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
3002         tcg_temp_free_ptr(r_tl_tmp);
3003     }
3004 
3005     tcg_temp_free_i32(r_tl);
3006 }
3007 #endif
3008 
3009 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
3010                      int width, bool cc, bool left)
3011 {
3012     TCGv lo1, lo2, t1, t2;
3013     uint64_t amask, tabl, tabr;
3014     int shift, imask, omask;
3015 
3016     if (cc) {
3017         tcg_gen_mov_tl(cpu_cc_src, s1);
3018         tcg_gen_mov_tl(cpu_cc_src2, s2);
3019         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
3020         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3021         dc->cc_op = CC_OP_SUB;
3022     }
3023 
3024     /* Theory of operation: there are two tables, left and right (not to
3025        be confused with the left and right versions of the opcode).  These
3026        are indexed by the low 3 bits of the inputs.  To make things "easy",
3027        these tables are loaded into two constants, TABL and TABR below.
3028        The operation index = (input & imask) << shift calculates the index
3029        into the constant, while val = (table >> index) & omask calculates
3030        the value we're looking for.  */
3031     switch (width) {
3032     case 8:
3033         imask = 0x7;
3034         shift = 3;
3035         omask = 0xff;
3036         if (left) {
3037             tabl = 0x80c0e0f0f8fcfeffULL;
3038             tabr = 0xff7f3f1f0f070301ULL;
3039         } else {
3040             tabl = 0x0103070f1f3f7fffULL;
3041             tabr = 0xfffefcf8f0e0c080ULL;
3042         }
3043         break;
3044     case 16:
3045         imask = 0x6;
3046         shift = 1;
3047         omask = 0xf;
3048         if (left) {
3049             tabl = 0x8cef;
3050             tabr = 0xf731;
3051         } else {
3052             tabl = 0x137f;
3053             tabr = 0xfec8;
3054         }
3055         break;
3056     case 32:
3057         imask = 0x4;
3058         shift = 0;
3059         omask = 0x3;
3060         if (left) {
3061             tabl = (2 << 2) | 3;
3062             tabr = (3 << 2) | 1;
3063         } else {
3064             tabl = (1 << 2) | 3;
3065             tabr = (3 << 2) | 2;
3066         }
3067         break;
3068     default:
3069         abort();
3070     }
3071 
3072     lo1 = tcg_temp_new();
3073     lo2 = tcg_temp_new();
3074     tcg_gen_andi_tl(lo1, s1, imask);
3075     tcg_gen_andi_tl(lo2, s2, imask);
3076     tcg_gen_shli_tl(lo1, lo1, shift);
3077     tcg_gen_shli_tl(lo2, lo2, shift);
3078 
3079     t1 = tcg_const_tl(tabl);
3080     t2 = tcg_const_tl(tabr);
3081     tcg_gen_shr_tl(lo1, t1, lo1);
3082     tcg_gen_shr_tl(lo2, t2, lo2);
3083     tcg_gen_andi_tl(dst, lo1, omask);
3084     tcg_gen_andi_tl(lo2, lo2, omask);
3085 
3086     amask = -8;
3087     if (AM_CHECK(dc)) {
3088         amask &= 0xffffffffULL;
3089     }
3090     tcg_gen_andi_tl(s1, s1, amask);
3091     tcg_gen_andi_tl(s2, s2, amask);
3092 
3093     /* We want to compute
3094         dst = (s1 == s2 ? lo1 : lo1 & lo2).
3095        We've already done dst = lo1, so this reduces to
3096         dst &= (s1 == s2 ? -1 : lo2)
3097        Which we perform by
3098         lo2 |= -(s1 == s2)
3099         dst &= lo2
3100     */
3101     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
3102     tcg_gen_neg_tl(t1, t1);
3103     tcg_gen_or_tl(lo2, lo2, t1);
3104     tcg_gen_and_tl(dst, dst, lo2);
3105 
3106     tcg_temp_free(lo1);
3107     tcg_temp_free(lo2);
3108     tcg_temp_free(t1);
3109     tcg_temp_free(t2);
3110 }
3111 
3112 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3113 {
3114     TCGv tmp = tcg_temp_new();
3115 
3116     tcg_gen_add_tl(tmp, s1, s2);
3117     tcg_gen_andi_tl(dst, tmp, -8);
3118     if (left) {
3119         tcg_gen_neg_tl(tmp, tmp);
3120     }
3121     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3122 
3123     tcg_temp_free(tmp);
3124 }
3125 
3126 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3127 {
3128     TCGv t1, t2, shift;
3129 
3130     t1 = tcg_temp_new();
3131     t2 = tcg_temp_new();
3132     shift = tcg_temp_new();
3133 
3134     tcg_gen_andi_tl(shift, gsr, 7);
3135     tcg_gen_shli_tl(shift, shift, 3);
3136     tcg_gen_shl_tl(t1, s1, shift);
3137 
3138     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3139        shift of (up to 63) followed by a constant shift of 1.  */
3140     tcg_gen_xori_tl(shift, shift, 63);
3141     tcg_gen_shr_tl(t2, s2, shift);
3142     tcg_gen_shri_tl(t2, t2, 1);
3143 
3144     tcg_gen_or_tl(dst, t1, t2);
3145 
3146     tcg_temp_free(t1);
3147     tcg_temp_free(t2);
3148     tcg_temp_free(shift);
3149 }
3150 #endif
3151 
/* Branch to the enclosing function's illegal_insn label unless the CPU
   model advertises the given integer-unit feature bit.  NOTE(review):
   usable only where an illegal_insn label is in scope (disas_sparc_insn).  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* As above, but for FPU features: jumps to the nfpu_insn label instead.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3158 
3159 /* before an instruction, dc->pc must be static */
3160 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3161 {
3162     unsigned int opc, rs1, rs2, rd;
3163     TCGv cpu_src1, cpu_src2;
3164     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3165     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3166     target_long simm;
3167 
3168     opc = GET_FIELD(insn, 0, 1);
3169     rd = GET_FIELD(insn, 2, 6);
3170 
3171     switch (opc) {
3172     case 0:                     /* branches/sethi */
3173         {
3174             unsigned int xop = GET_FIELD(insn, 7, 9);
3175             int32_t target;
3176             switch (xop) {
3177 #ifdef TARGET_SPARC64
3178             case 0x1:           /* V9 BPcc */
3179                 {
3180                     int cc;
3181 
3182                     target = GET_FIELD_SP(insn, 0, 18);
3183                     target = sign_extend(target, 19);
3184                     target <<= 2;
3185                     cc = GET_FIELD_SP(insn, 20, 21);
3186                     if (cc == 0)
3187                         do_branch(dc, target, insn, 0);
3188                     else if (cc == 2)
3189                         do_branch(dc, target, insn, 1);
3190                     else
3191                         goto illegal_insn;
3192                     goto jmp_insn;
3193                 }
3194             case 0x3:           /* V9 BPr */
3195                 {
3196                     target = GET_FIELD_SP(insn, 0, 13) |
3197                         (GET_FIELD_SP(insn, 20, 21) << 14);
3198                     target = sign_extend(target, 16);
3199                     target <<= 2;
3200                     cpu_src1 = get_src1(dc, insn);
3201                     do_branch_reg(dc, target, insn, cpu_src1);
3202                     goto jmp_insn;
3203                 }
3204             case 0x5:           /* V9 FBPcc */
3205                 {
3206                     int cc = GET_FIELD_SP(insn, 20, 21);
3207                     if (gen_trap_ifnofpu(dc)) {
3208                         goto jmp_insn;
3209                     }
3210                     target = GET_FIELD_SP(insn, 0, 18);
3211                     target = sign_extend(target, 19);
3212                     target <<= 2;
3213                     do_fbranch(dc, target, insn, cc);
3214                     goto jmp_insn;
3215                 }
3216 #else
3217             case 0x7:           /* CBN+x */
3218                 {
3219                     goto ncp_insn;
3220                 }
3221 #endif
3222             case 0x2:           /* BN+x */
3223                 {
3224                     target = GET_FIELD(insn, 10, 31);
3225                     target = sign_extend(target, 22);
3226                     target <<= 2;
3227                     do_branch(dc, target, insn, 0);
3228                     goto jmp_insn;
3229                 }
3230             case 0x6:           /* FBN+x */
3231                 {
3232                     if (gen_trap_ifnofpu(dc)) {
3233                         goto jmp_insn;
3234                     }
3235                     target = GET_FIELD(insn, 10, 31);
3236                     target = sign_extend(target, 22);
3237                     target <<= 2;
3238                     do_fbranch(dc, target, insn, 0);
3239                     goto jmp_insn;
3240                 }
3241             case 0x4:           /* SETHI */
3242                 /* Special-case %g0 because that's the canonical nop.  */
3243                 if (rd) {
3244                     uint32_t value = GET_FIELD(insn, 10, 31);
3245                     TCGv t = gen_dest_gpr(dc, rd);
3246                     tcg_gen_movi_tl(t, value << 10);
3247                     gen_store_gpr(dc, rd, t);
3248                 }
3249                 break;
3250             case 0x0:           /* UNIMPL */
3251             default:
3252                 goto illegal_insn;
3253             }
3254             break;
3255         }
3256         break;
3257     case 1:                     /*CALL*/
3258         {
3259             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3260             TCGv o7 = gen_dest_gpr(dc, 15);
3261 
3262             tcg_gen_movi_tl(o7, dc->pc);
3263             gen_store_gpr(dc, 15, o7);
3264             target += dc->pc;
3265             gen_mov_pc_npc(dc);
3266 #ifdef TARGET_SPARC64
3267             if (unlikely(AM_CHECK(dc))) {
3268                 target &= 0xffffffffULL;
3269             }
3270 #endif
3271             dc->npc = target;
3272         }
3273         goto jmp_insn;
3274     case 2:                     /* FPU & Logical Operations */
3275         {
3276             unsigned int xop = GET_FIELD(insn, 7, 12);
3277             TCGv cpu_dst = get_temp_tl(dc);
3278             TCGv cpu_tmp0;
3279 
3280             if (xop == 0x3a) {  /* generate trap */
3281                 int cond = GET_FIELD(insn, 3, 6);
3282                 TCGv_i32 trap;
3283                 TCGLabel *l1 = NULL;
3284                 int mask;
3285 
3286                 if (cond == 0) {
3287                     /* Trap never.  */
3288                     break;
3289                 }
3290 
3291                 save_state(dc);
3292 
3293                 if (cond != 8) {
3294                     /* Conditional trap.  */
3295                     DisasCompare cmp;
3296 #ifdef TARGET_SPARC64
3297                     /* V9 icc/xcc */
3298                     int cc = GET_FIELD_SP(insn, 11, 12);
3299                     if (cc == 0) {
3300                         gen_compare(&cmp, 0, cond, dc);
3301                     } else if (cc == 2) {
3302                         gen_compare(&cmp, 1, cond, dc);
3303                     } else {
3304                         goto illegal_insn;
3305                     }
3306 #else
3307                     gen_compare(&cmp, 0, cond, dc);
3308 #endif
3309                     l1 = gen_new_label();
3310                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3311                                       cmp.c1, cmp.c2, l1);
3312                     free_compare(&cmp);
3313                 }
3314 
3315                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3316                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3317 
3318                 /* Don't use the normal temporaries, as they may well have
3319                    gone out of scope with the branch above.  While we're
3320                    doing that we might as well pre-truncate to 32-bit.  */
3321                 trap = tcg_temp_new_i32();
3322 
3323                 rs1 = GET_FIELD_SP(insn, 14, 18);
3324                 if (IS_IMM) {
3325                     rs2 = GET_FIELD_SP(insn, 0, 7);
3326                     if (rs1 == 0) {
3327                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3328                         /* Signal that the trap value is fully constant.  */
3329                         mask = 0;
3330                     } else {
3331                         TCGv t1 = gen_load_gpr(dc, rs1);
3332                         tcg_gen_trunc_tl_i32(trap, t1);
3333                         tcg_gen_addi_i32(trap, trap, rs2);
3334                     }
3335                 } else {
3336                     TCGv t1, t2;
3337                     rs2 = GET_FIELD_SP(insn, 0, 4);
3338                     t1 = gen_load_gpr(dc, rs1);
3339                     t2 = gen_load_gpr(dc, rs2);
3340                     tcg_gen_add_tl(t1, t1, t2);
3341                     tcg_gen_trunc_tl_i32(trap, t1);
3342                 }
3343                 if (mask != 0) {
3344                     tcg_gen_andi_i32(trap, trap, mask);
3345                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3346                 }
3347 
3348                 gen_helper_raise_exception(cpu_env, trap);
3349                 tcg_temp_free_i32(trap);
3350 
3351                 if (cond == 8) {
3352                     /* An unconditional trap ends the TB.  */
3353                     dc->base.is_jmp = DISAS_NORETURN;
3354                     goto jmp_insn;
3355                 } else {
3356                     /* A conditional trap falls through to the next insn.  */
3357                     gen_set_label(l1);
3358                     break;
3359                 }
3360             } else if (xop == 0x28) {
3361                 rs1 = GET_FIELD(insn, 13, 17);
3362                 switch(rs1) {
3363                 case 0: /* rdy */
3364 #ifndef TARGET_SPARC64
3365                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3366                                        manual, rdy on the microSPARC
3367                                        II */
3368                 case 0x0f:          /* stbar in the SPARCv8 manual,
3369                                        rdy on the microSPARC II */
3370                 case 0x10 ... 0x1f: /* implementation-dependent in the
3371                                        SPARCv8 manual, rdy on the
3372                                        microSPARC II */
3373                     /* Read Asr17 */
3374                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3375                         TCGv t = gen_dest_gpr(dc, rd);
3376                         /* Read Asr17 for a Leon3 monoprocessor */
3377                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3378                         gen_store_gpr(dc, rd, t);
3379                         break;
3380                     }
3381 #endif
3382                     gen_store_gpr(dc, rd, cpu_y);
3383                     break;
3384 #ifdef TARGET_SPARC64
3385                 case 0x2: /* V9 rdccr */
3386                     update_psr(dc);
3387                     gen_helper_rdccr(cpu_dst, cpu_env);
3388                     gen_store_gpr(dc, rd, cpu_dst);
3389                     break;
3390                 case 0x3: /* V9 rdasi */
3391                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3392                     gen_store_gpr(dc, rd, cpu_dst);
3393                     break;
3394                 case 0x4: /* V9 rdtick */
3395                     {
3396                         TCGv_ptr r_tickptr;
3397                         TCGv_i32 r_const;
3398 
3399                         r_tickptr = tcg_temp_new_ptr();
3400                         r_const = tcg_const_i32(dc->mem_idx);
3401                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3402                                        offsetof(CPUSPARCState, tick));
3403                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3404                                                   r_const);
3405                         tcg_temp_free_ptr(r_tickptr);
3406                         tcg_temp_free_i32(r_const);
3407                         gen_store_gpr(dc, rd, cpu_dst);
3408                     }
3409                     break;
3410                 case 0x5: /* V9 rdpc */
3411                     {
3412                         TCGv t = gen_dest_gpr(dc, rd);
3413                         if (unlikely(AM_CHECK(dc))) {
3414                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3415                         } else {
3416                             tcg_gen_movi_tl(t, dc->pc);
3417                         }
3418                         gen_store_gpr(dc, rd, t);
3419                     }
3420                     break;
3421                 case 0x6: /* V9 rdfprs */
3422                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3423                     gen_store_gpr(dc, rd, cpu_dst);
3424                     break;
3425                 case 0xf: /* V9 membar */
3426                     break; /* no effect */
3427                 case 0x13: /* Graphics Status */
3428                     if (gen_trap_ifnofpu(dc)) {
3429                         goto jmp_insn;
3430                     }
3431                     gen_store_gpr(dc, rd, cpu_gsr);
3432                     break;
3433                 case 0x16: /* Softint */
3434                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3435                                      offsetof(CPUSPARCState, softint));
3436                     gen_store_gpr(dc, rd, cpu_dst);
3437                     break;
3438                 case 0x17: /* Tick compare */
3439                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3440                     break;
3441                 case 0x18: /* System tick */
3442                     {
3443                         TCGv_ptr r_tickptr;
3444                         TCGv_i32 r_const;
3445 
3446                         r_tickptr = tcg_temp_new_ptr();
3447                         r_const = tcg_const_i32(dc->mem_idx);
3448                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3449                                        offsetof(CPUSPARCState, stick));
3450                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3451                                                   r_const);
3452                         tcg_temp_free_ptr(r_tickptr);
3453                         tcg_temp_free_i32(r_const);
3454                         gen_store_gpr(dc, rd, cpu_dst);
3455                     }
3456                     break;
3457                 case 0x19: /* System tick compare */
3458                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3459                     break;
3460                 case 0x1a: /* UltraSPARC-T1 Strand status */
3461                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3462                      * this ASR as impl. dep
3463                      */
3464                     CHECK_IU_FEATURE(dc, HYPV);
3465                     {
3466                         TCGv t = gen_dest_gpr(dc, rd);
3467                         tcg_gen_movi_tl(t, 1UL);
3468                         gen_store_gpr(dc, rd, t);
3469                     }
3470                     break;
3471                 case 0x10: /* Performance Control */
3472                 case 0x11: /* Performance Instrumentation Counter */
3473                 case 0x12: /* Dispatch Control */
3474                 case 0x14: /* Softint set, WO */
3475                 case 0x15: /* Softint clear, WO */
3476 #endif
3477                 default:
3478                     goto illegal_insn;
3479                 }
3480 #if !defined(CONFIG_USER_ONLY)
3481             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3482 #ifndef TARGET_SPARC64
3483                 if (!supervisor(dc)) {
3484                     goto priv_insn;
3485                 }
3486                 update_psr(dc);
3487                 gen_helper_rdpsr(cpu_dst, cpu_env);
3488 #else
3489                 CHECK_IU_FEATURE(dc, HYPV);
3490                 if (!hypervisor(dc))
3491                     goto priv_insn;
3492                 rs1 = GET_FIELD(insn, 13, 17);
3493                 switch (rs1) {
3494                 case 0: // hpstate
3495                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3496                                    offsetof(CPUSPARCState, hpstate));
3497                     break;
3498                 case 1: // htstate
3499                     // gen_op_rdhtstate();
3500                     break;
3501                 case 3: // hintp
3502                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3503                     break;
3504                 case 5: // htba
3505                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3506                     break;
3507                 case 6: // hver
3508                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3509                     break;
3510                 case 31: // hstick_cmpr
3511                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3512                     break;
3513                 default:
3514                     goto illegal_insn;
3515                 }
3516 #endif
3517                 gen_store_gpr(dc, rd, cpu_dst);
3518                 break;
3519             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3520                 if (!supervisor(dc)) {
3521                     goto priv_insn;
3522                 }
3523                 cpu_tmp0 = get_temp_tl(dc);
3524 #ifdef TARGET_SPARC64
3525                 rs1 = GET_FIELD(insn, 13, 17);
3526                 switch (rs1) {
3527                 case 0: // tpc
3528                     {
3529                         TCGv_ptr r_tsptr;
3530 
3531                         r_tsptr = tcg_temp_new_ptr();
3532                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3533                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3534                                       offsetof(trap_state, tpc));
3535                         tcg_temp_free_ptr(r_tsptr);
3536                     }
3537                     break;
3538                 case 1: // tnpc
3539                     {
3540                         TCGv_ptr r_tsptr;
3541 
3542                         r_tsptr = tcg_temp_new_ptr();
3543                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3544                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3545                                       offsetof(trap_state, tnpc));
3546                         tcg_temp_free_ptr(r_tsptr);
3547                     }
3548                     break;
3549                 case 2: // tstate
3550                     {
3551                         TCGv_ptr r_tsptr;
3552 
3553                         r_tsptr = tcg_temp_new_ptr();
3554                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3555                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3556                                       offsetof(trap_state, tstate));
3557                         tcg_temp_free_ptr(r_tsptr);
3558                     }
3559                     break;
3560                 case 3: // tt
3561                     {
3562                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3563 
3564                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3565                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3566                                          offsetof(trap_state, tt));
3567                         tcg_temp_free_ptr(r_tsptr);
3568                     }
3569                     break;
3570                 case 4: // tick
3571                     {
3572                         TCGv_ptr r_tickptr;
3573                         TCGv_i32 r_const;
3574 
3575                         r_tickptr = tcg_temp_new_ptr();
3576                         r_const = tcg_const_i32(dc->mem_idx);
3577                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3578                                        offsetof(CPUSPARCState, tick));
3579                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3580                                                   r_tickptr, r_const);
3581                         tcg_temp_free_ptr(r_tickptr);
3582                         tcg_temp_free_i32(r_const);
3583                     }
3584                     break;
3585                 case 5: // tba
3586                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3587                     break;
3588                 case 6: // pstate
3589                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3590                                      offsetof(CPUSPARCState, pstate));
3591                     break;
3592                 case 7: // tl
3593                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3594                                      offsetof(CPUSPARCState, tl));
3595                     break;
3596                 case 8: // pil
3597                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3598                                      offsetof(CPUSPARCState, psrpil));
3599                     break;
3600                 case 9: // cwp
3601                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3602                     break;
3603                 case 10: // cansave
3604                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3605                                      offsetof(CPUSPARCState, cansave));
3606                     break;
3607                 case 11: // canrestore
3608                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3609                                      offsetof(CPUSPARCState, canrestore));
3610                     break;
3611                 case 12: // cleanwin
3612                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3613                                      offsetof(CPUSPARCState, cleanwin));
3614                     break;
3615                 case 13: // otherwin
3616                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3617                                      offsetof(CPUSPARCState, otherwin));
3618                     break;
3619                 case 14: // wstate
3620                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3621                                      offsetof(CPUSPARCState, wstate));
3622                     break;
3623                 case 16: // UA2005 gl
3624                     CHECK_IU_FEATURE(dc, GL);
3625                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3626                                      offsetof(CPUSPARCState, gl));
3627                     break;
3628                 case 26: // UA2005 strand status
3629                     CHECK_IU_FEATURE(dc, HYPV);
3630                     if (!hypervisor(dc))
3631                         goto priv_insn;
3632                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3633                     break;
3634                 case 31: // ver
3635                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3636                     break;
3637                 case 15: // fq
3638                 default:
3639                     goto illegal_insn;
3640                 }
3641 #else
3642                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3643 #endif
3644                 gen_store_gpr(dc, rd, cpu_tmp0);
3645                 break;
3646             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3647 #ifdef TARGET_SPARC64
3648                 gen_helper_flushw(cpu_env);
3649 #else
3650                 if (!supervisor(dc))
3651                     goto priv_insn;
3652                 gen_store_gpr(dc, rd, cpu_tbr);
3653 #endif
3654                 break;
3655 #endif
3656             } else if (xop == 0x34) {   /* FPU Operations */
3657                 if (gen_trap_ifnofpu(dc)) {
3658                     goto jmp_insn;
3659                 }
3660                 gen_op_clear_ieee_excp_and_FTT();
3661                 rs1 = GET_FIELD(insn, 13, 17);
3662                 rs2 = GET_FIELD(insn, 27, 31);
3663                 xop = GET_FIELD(insn, 18, 26);
3664 
3665                 switch (xop) {
3666                 case 0x1: /* fmovs */
3667                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3668                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3669                     break;
3670                 case 0x5: /* fnegs */
3671                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3672                     break;
3673                 case 0x9: /* fabss */
3674                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3675                     break;
3676                 case 0x29: /* fsqrts */
3677                     CHECK_FPU_FEATURE(dc, FSQRT);
3678                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3679                     break;
3680                 case 0x2a: /* fsqrtd */
3681                     CHECK_FPU_FEATURE(dc, FSQRT);
3682                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3683                     break;
3684                 case 0x2b: /* fsqrtq */
3685                     CHECK_FPU_FEATURE(dc, FLOAT128);
3686                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3687                     break;
3688                 case 0x41: /* fadds */
3689                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3690                     break;
3691                 case 0x42: /* faddd */
3692                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3693                     break;
3694                 case 0x43: /* faddq */
3695                     CHECK_FPU_FEATURE(dc, FLOAT128);
3696                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3697                     break;
3698                 case 0x45: /* fsubs */
3699                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3700                     break;
3701                 case 0x46: /* fsubd */
3702                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3703                     break;
3704                 case 0x47: /* fsubq */
3705                     CHECK_FPU_FEATURE(dc, FLOAT128);
3706                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3707                     break;
3708                 case 0x49: /* fmuls */
3709                     CHECK_FPU_FEATURE(dc, FMUL);
3710                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3711                     break;
3712                 case 0x4a: /* fmuld */
3713                     CHECK_FPU_FEATURE(dc, FMUL);
3714                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3715                     break;
3716                 case 0x4b: /* fmulq */
3717                     CHECK_FPU_FEATURE(dc, FLOAT128);
3718                     CHECK_FPU_FEATURE(dc, FMUL);
3719                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3720                     break;
3721                 case 0x4d: /* fdivs */
3722                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3723                     break;
3724                 case 0x4e: /* fdivd */
3725                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3726                     break;
3727                 case 0x4f: /* fdivq */
3728                     CHECK_FPU_FEATURE(dc, FLOAT128);
3729                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3730                     break;
3731                 case 0x69: /* fsmuld */
3732                     CHECK_FPU_FEATURE(dc, FSMULD);
3733                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3734                     break;
3735                 case 0x6e: /* fdmulq */
3736                     CHECK_FPU_FEATURE(dc, FLOAT128);
3737                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3738                     break;
3739                 case 0xc4: /* fitos */
3740                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3741                     break;
3742                 case 0xc6: /* fdtos */
3743                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3744                     break;
3745                 case 0xc7: /* fqtos */
3746                     CHECK_FPU_FEATURE(dc, FLOAT128);
3747                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3748                     break;
3749                 case 0xc8: /* fitod */
3750                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3751                     break;
3752                 case 0xc9: /* fstod */
3753                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3754                     break;
3755                 case 0xcb: /* fqtod */
3756                     CHECK_FPU_FEATURE(dc, FLOAT128);
3757                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3758                     break;
3759                 case 0xcc: /* fitoq */
3760                     CHECK_FPU_FEATURE(dc, FLOAT128);
3761                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3762                     break;
3763                 case 0xcd: /* fstoq */
3764                     CHECK_FPU_FEATURE(dc, FLOAT128);
3765                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3766                     break;
3767                 case 0xce: /* fdtoq */
3768                     CHECK_FPU_FEATURE(dc, FLOAT128);
3769                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3770                     break;
3771                 case 0xd1: /* fstoi */
3772                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3773                     break;
3774                 case 0xd2: /* fdtoi */
3775                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3776                     break;
3777                 case 0xd3: /* fqtoi */
3778                     CHECK_FPU_FEATURE(dc, FLOAT128);
3779                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3780                     break;
3781 #ifdef TARGET_SPARC64
3782                 case 0x2: /* V9 fmovd */
3783                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3784                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3785                     break;
3786                 case 0x3: /* V9 fmovq */
3787                     CHECK_FPU_FEATURE(dc, FLOAT128);
3788                     gen_move_Q(dc, rd, rs2);
3789                     break;
3790                 case 0x6: /* V9 fnegd */
3791                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3792                     break;
3793                 case 0x7: /* V9 fnegq */
3794                     CHECK_FPU_FEATURE(dc, FLOAT128);
3795                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3796                     break;
3797                 case 0xa: /* V9 fabsd */
3798                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3799                     break;
3800                 case 0xb: /* V9 fabsq */
3801                     CHECK_FPU_FEATURE(dc, FLOAT128);
3802                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3803                     break;
3804                 case 0x81: /* V9 fstox */
3805                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3806                     break;
3807                 case 0x82: /* V9 fdtox */
3808                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3809                     break;
3810                 case 0x83: /* V9 fqtox */
3811                     CHECK_FPU_FEATURE(dc, FLOAT128);
3812                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3813                     break;
3814                 case 0x84: /* V9 fxtos */
3815                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3816                     break;
3817                 case 0x88: /* V9 fxtod */
3818                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3819                     break;
3820                 case 0x8c: /* V9 fxtoq */
3821                     CHECK_FPU_FEATURE(dc, FLOAT128);
3822                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3823                     break;
3824 #endif
3825                 default:
3826                     goto illegal_insn;
3827                 }
3828             } else if (xop == 0x35) {   /* FPU Operations */
3829 #ifdef TARGET_SPARC64
3830                 int cond;
3831 #endif
3832                 if (gen_trap_ifnofpu(dc)) {
3833                     goto jmp_insn;
3834                 }
3835                 gen_op_clear_ieee_excp_and_FTT();
3836                 rs1 = GET_FIELD(insn, 13, 17);
3837                 rs2 = GET_FIELD(insn, 27, 31);
3838                 xop = GET_FIELD(insn, 18, 26);
3839 
3840 #ifdef TARGET_SPARC64
/*
 * FMOVR: emit TCG for a V9 conditional FP move on integer register
 * contents (fmovr{s,d,q}).  Extracts the register-condition field from
 * insn bits 10-12, builds a DisasCompare against rs1 via
 * gen_compare_reg(), then conditionally moves FP register rs2 into rd
 * at the requested size.  Uses locals of the enclosing decoder
 * (dc, insn, rd, rs2, cond, cpu_src1); #undef'd after its three uses.
 */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)
3850 
3851                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3852                     FMOVR(s);
3853                     break;
3854                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3855                     FMOVR(d);
3856                     break;
3857                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3858                     CHECK_FPU_FEATURE(dc, FLOAT128);
3859                     FMOVR(q);
3860                     break;
3861                 }
3862 #undef FMOVR
3863 #endif
3864                 switch (xop) {
3865 #ifdef TARGET_SPARC64
/*
 * FMOVCC (%fccN form): emit TCG for a V9 conditional FP move keyed on
 * floating-point condition code register fcc (0-3).  Extracts the
 * condition field from insn bits 14-17, builds a DisasCompare from
 * %fcc<fcc> via gen_fcompare(), then conditionally moves FP register
 * rs2 into rd at the requested size.  Redefined further down for the
 * %icc/%xcc variant, and #undef'd between the two groups of cases.
 */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)
3874 
3875                     case 0x001: /* V9 fmovscc %fcc0 */
3876                         FMOVCC(0, s);
3877                         break;
3878                     case 0x002: /* V9 fmovdcc %fcc0 */
3879                         FMOVCC(0, d);
3880                         break;
3881                     case 0x003: /* V9 fmovqcc %fcc0 */
3882                         CHECK_FPU_FEATURE(dc, FLOAT128);
3883                         FMOVCC(0, q);
3884                         break;
3885                     case 0x041: /* V9 fmovscc %fcc1 */
3886                         FMOVCC(1, s);
3887                         break;
3888                     case 0x042: /* V9 fmovdcc %fcc1 */
3889                         FMOVCC(1, d);
3890                         break;
3891                     case 0x043: /* V9 fmovqcc %fcc1 */
3892                         CHECK_FPU_FEATURE(dc, FLOAT128);
3893                         FMOVCC(1, q);
3894                         break;
3895                     case 0x081: /* V9 fmovscc %fcc2 */
3896                         FMOVCC(2, s);
3897                         break;
3898                     case 0x082: /* V9 fmovdcc %fcc2 */
3899                         FMOVCC(2, d);
3900                         break;
3901                     case 0x083: /* V9 fmovqcc %fcc2 */
3902                         CHECK_FPU_FEATURE(dc, FLOAT128);
3903                         FMOVCC(2, q);
3904                         break;
3905                     case 0x0c1: /* V9 fmovscc %fcc3 */
3906                         FMOVCC(3, s);
3907                         break;
3908                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3909                         FMOVCC(3, d);
3910                         break;
3911                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3912                         CHECK_FPU_FEATURE(dc, FLOAT128);
3913                         FMOVCC(3, q);
3914                         break;
3915 #undef FMOVCC
/*
 * FMOVCC (%icc/%xcc form): emit TCG for a V9 conditional FP move keyed
 * on the integer condition codes.  xcc selects the flag set — 0 for
 * %icc (the 0x10x cases), 1 for %xcc (the 0x18x cases).  Extracts the
 * condition field from insn bits 14-17, builds a DisasCompare via
 * gen_compare(), then conditionally moves FP register rs2 into rd at
 * the requested size.
 */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)
3924 
3925                     case 0x101: /* V9 fmovscc %icc */
3926                         FMOVCC(0, s);
3927                         break;
3928                     case 0x102: /* V9 fmovdcc %icc */
3929                         FMOVCC(0, d);
3930                         break;
3931                     case 0x103: /* V9 fmovqcc %icc */
3932                         CHECK_FPU_FEATURE(dc, FLOAT128);
3933                         FMOVCC(0, q);
3934                         break;
3935                     case 0x181: /* V9 fmovscc %xcc */
3936                         FMOVCC(1, s);
3937                         break;
3938                     case 0x182: /* V9 fmovdcc %xcc */
3939                         FMOVCC(1, d);
3940                         break;
3941                     case 0x183: /* V9 fmovqcc %xcc */
3942                         CHECK_FPU_FEATURE(dc, FLOAT128);
3943                         FMOVCC(1, q);
3944                         break;
3945 #undef FMOVCC
3946 #endif
3947                     case 0x51: /* fcmps, V9 %fcc */
3948                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3949                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3950                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3951                         break;
3952                     case 0x52: /* fcmpd, V9 %fcc */
3953                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3954                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3955                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3956                         break;
3957                     case 0x53: /* fcmpq, V9 %fcc */
3958                         CHECK_FPU_FEATURE(dc, FLOAT128);
3959                         gen_op_load_fpr_QT0(QFPREG(rs1));
3960                         gen_op_load_fpr_QT1(QFPREG(rs2));
3961                         gen_op_fcmpq(rd & 3);
3962                         break;
3963                     case 0x55: /* fcmpes, V9 %fcc */
3964                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3965                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3966                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3967                         break;
3968                     case 0x56: /* fcmped, V9 %fcc */
3969                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3970                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3971                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3972                         break;
3973                     case 0x57: /* fcmpeq, V9 %fcc */
3974                         CHECK_FPU_FEATURE(dc, FLOAT128);
3975                         gen_op_load_fpr_QT0(QFPREG(rs1));
3976                         gen_op_load_fpr_QT1(QFPREG(rs2));
3977                         gen_op_fcmpeq(rd & 3);
3978                         break;
3979                     default:
3980                         goto illegal_insn;
3981                 }
3982             } else if (xop == 0x2) {
3983                 TCGv dst = gen_dest_gpr(dc, rd);
3984                 rs1 = GET_FIELD(insn, 13, 17);
3985                 if (rs1 == 0) {
3986                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3987                     if (IS_IMM) {       /* immediate */
3988                         simm = GET_FIELDs(insn, 19, 31);
3989                         tcg_gen_movi_tl(dst, simm);
3990                         gen_store_gpr(dc, rd, dst);
3991                     } else {            /* register */
3992                         rs2 = GET_FIELD(insn, 27, 31);
3993                         if (rs2 == 0) {
3994                             tcg_gen_movi_tl(dst, 0);
3995                             gen_store_gpr(dc, rd, dst);
3996                         } else {
3997                             cpu_src2 = gen_load_gpr(dc, rs2);
3998                             gen_store_gpr(dc, rd, cpu_src2);
3999                         }
4000                     }
4001                 } else {
4002                     cpu_src1 = get_src1(dc, insn);
4003                     if (IS_IMM) {       /* immediate */
4004                         simm = GET_FIELDs(insn, 19, 31);
4005                         tcg_gen_ori_tl(dst, cpu_src1, simm);
4006                         gen_store_gpr(dc, rd, dst);
4007                     } else {            /* register */
4008                         rs2 = GET_FIELD(insn, 27, 31);
4009                         if (rs2 == 0) {
4010                             /* mov shortcut:  or x, %g0, y -> mov x, y */
4011                             gen_store_gpr(dc, rd, cpu_src1);
4012                         } else {
4013                             cpu_src2 = gen_load_gpr(dc, rs2);
4014                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
4015                             gen_store_gpr(dc, rd, dst);
4016                         }
4017                     }
4018                 }
4019 #ifdef TARGET_SPARC64
4020             } else if (xop == 0x25) { /* sll, V9 sllx */
4021                 cpu_src1 = get_src1(dc, insn);
4022                 if (IS_IMM) {   /* immediate */
4023                     simm = GET_FIELDs(insn, 20, 31);
4024                     if (insn & (1 << 12)) {
4025                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4026                     } else {
4027                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4028                     }
4029                 } else {                /* register */
4030                     rs2 = GET_FIELD(insn, 27, 31);
4031                     cpu_src2 = gen_load_gpr(dc, rs2);
4032                     cpu_tmp0 = get_temp_tl(dc);
4033                     if (insn & (1 << 12)) {
4034                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4035                     } else {
4036                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4037                     }
4038                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4039                 }
4040                 gen_store_gpr(dc, rd, cpu_dst);
4041             } else if (xop == 0x26) { /* srl, V9 srlx */
4042                 cpu_src1 = get_src1(dc, insn);
4043                 if (IS_IMM) {   /* immediate */
4044                     simm = GET_FIELDs(insn, 20, 31);
4045                     if (insn & (1 << 12)) {
4046                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4047                     } else {
4048                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4049                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4050                     }
4051                 } else {                /* register */
4052                     rs2 = GET_FIELD(insn, 27, 31);
4053                     cpu_src2 = gen_load_gpr(dc, rs2);
4054                     cpu_tmp0 = get_temp_tl(dc);
4055                     if (insn & (1 << 12)) {
4056                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4057                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4058                     } else {
4059                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4060                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4061                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4062                     }
4063                 }
4064                 gen_store_gpr(dc, rd, cpu_dst);
4065             } else if (xop == 0x27) { /* sra, V9 srax */
4066                 cpu_src1 = get_src1(dc, insn);
4067                 if (IS_IMM) {   /* immediate */
4068                     simm = GET_FIELDs(insn, 20, 31);
4069                     if (insn & (1 << 12)) {
4070                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4071                     } else {
4072                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4073                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4074                     }
4075                 } else {                /* register */
4076                     rs2 = GET_FIELD(insn, 27, 31);
4077                     cpu_src2 = gen_load_gpr(dc, rs2);
4078                     cpu_tmp0 = get_temp_tl(dc);
4079                     if (insn & (1 << 12)) {
4080                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4081                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4082                     } else {
4083                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4084                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4085                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4086                     }
4087                 }
4088                 gen_store_gpr(dc, rd, cpu_dst);
4089 #endif
4090             } else if (xop < 0x36) {
4091                 if (xop < 0x20) {
4092                     cpu_src1 = get_src1(dc, insn);
4093                     cpu_src2 = get_src2(dc, insn);
4094                     switch (xop & ~0x10) {
4095                     case 0x0: /* add */
4096                         if (xop & 0x10) {
4097                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4098                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4099                             dc->cc_op = CC_OP_ADD;
4100                         } else {
4101                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4102                         }
4103                         break;
4104                     case 0x1: /* and */
4105                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4106                         if (xop & 0x10) {
4107                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4108                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4109                             dc->cc_op = CC_OP_LOGIC;
4110                         }
4111                         break;
4112                     case 0x2: /* or */
4113                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4114                         if (xop & 0x10) {
4115                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4116                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4117                             dc->cc_op = CC_OP_LOGIC;
4118                         }
4119                         break;
4120                     case 0x3: /* xor */
4121                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4122                         if (xop & 0x10) {
4123                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4124                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4125                             dc->cc_op = CC_OP_LOGIC;
4126                         }
4127                         break;
4128                     case 0x4: /* sub */
4129                         if (xop & 0x10) {
4130                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4131                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4132                             dc->cc_op = CC_OP_SUB;
4133                         } else {
4134                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4135                         }
4136                         break;
4137                     case 0x5: /* andn */
4138                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4139                         if (xop & 0x10) {
4140                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4141                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4142                             dc->cc_op = CC_OP_LOGIC;
4143                         }
4144                         break;
4145                     case 0x6: /* orn */
4146                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4147                         if (xop & 0x10) {
4148                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4149                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4150                             dc->cc_op = CC_OP_LOGIC;
4151                         }
4152                         break;
4153                     case 0x7: /* xorn */
4154                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4155                         if (xop & 0x10) {
4156                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4157                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4158                             dc->cc_op = CC_OP_LOGIC;
4159                         }
4160                         break;
4161                     case 0x8: /* addx, V9 addc */
4162                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4163                                         (xop & 0x10));
4164                         break;
4165 #ifdef TARGET_SPARC64
4166                     case 0x9: /* V9 mulx */
4167                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4168                         break;
4169 #endif
4170                     case 0xa: /* umul */
4171                         CHECK_IU_FEATURE(dc, MUL);
4172                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4173                         if (xop & 0x10) {
4174                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4175                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4176                             dc->cc_op = CC_OP_LOGIC;
4177                         }
4178                         break;
4179                     case 0xb: /* smul */
4180                         CHECK_IU_FEATURE(dc, MUL);
4181                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4182                         if (xop & 0x10) {
4183                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4184                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4185                             dc->cc_op = CC_OP_LOGIC;
4186                         }
4187                         break;
4188                     case 0xc: /* subx, V9 subc */
4189                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4190                                         (xop & 0x10));
4191                         break;
4192 #ifdef TARGET_SPARC64
4193                     case 0xd: /* V9 udivx */
4194                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4195                         break;
4196 #endif
4197                     case 0xe: /* udiv */
4198                         CHECK_IU_FEATURE(dc, DIV);
4199                         if (xop & 0x10) {
4200                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4201                                                cpu_src2);
4202                             dc->cc_op = CC_OP_DIV;
4203                         } else {
4204                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4205                                             cpu_src2);
4206                         }
4207                         break;
4208                     case 0xf: /* sdiv */
4209                         CHECK_IU_FEATURE(dc, DIV);
4210                         if (xop & 0x10) {
4211                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4212                                                cpu_src2);
4213                             dc->cc_op = CC_OP_DIV;
4214                         } else {
4215                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4216                                             cpu_src2);
4217                         }
4218                         break;
4219                     default:
4220                         goto illegal_insn;
4221                     }
4222                     gen_store_gpr(dc, rd, cpu_dst);
4223                 } else {
4224                     cpu_src1 = get_src1(dc, insn);
4225                     cpu_src2 = get_src2(dc, insn);
4226                     switch (xop) {
4227                     case 0x20: /* taddcc */
4228                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4229                         gen_store_gpr(dc, rd, cpu_dst);
4230                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4231                         dc->cc_op = CC_OP_TADD;
4232                         break;
4233                     case 0x21: /* tsubcc */
4234                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4235                         gen_store_gpr(dc, rd, cpu_dst);
4236                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4237                         dc->cc_op = CC_OP_TSUB;
4238                         break;
4239                     case 0x22: /* taddcctv */
4240                         gen_helper_taddcctv(cpu_dst, cpu_env,
4241                                             cpu_src1, cpu_src2);
4242                         gen_store_gpr(dc, rd, cpu_dst);
4243                         dc->cc_op = CC_OP_TADDTV;
4244                         break;
4245                     case 0x23: /* tsubcctv */
4246                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4247                                             cpu_src1, cpu_src2);
4248                         gen_store_gpr(dc, rd, cpu_dst);
4249                         dc->cc_op = CC_OP_TSUBTV;
4250                         break;
4251                     case 0x24: /* mulscc */
4252                         update_psr(dc);
4253                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4254                         gen_store_gpr(dc, rd, cpu_dst);
4255                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4256                         dc->cc_op = CC_OP_ADD;
4257                         break;
4258 #ifndef TARGET_SPARC64
4259                     case 0x25:  /* sll */
4260                         if (IS_IMM) { /* immediate */
4261                             simm = GET_FIELDs(insn, 20, 31);
4262                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4263                         } else { /* register */
4264                             cpu_tmp0 = get_temp_tl(dc);
4265                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4266                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4267                         }
4268                         gen_store_gpr(dc, rd, cpu_dst);
4269                         break;
4270                     case 0x26:  /* srl */
4271                         if (IS_IMM) { /* immediate */
4272                             simm = GET_FIELDs(insn, 20, 31);
4273                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4274                         } else { /* register */
4275                             cpu_tmp0 = get_temp_tl(dc);
4276                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4277                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4278                         }
4279                         gen_store_gpr(dc, rd, cpu_dst);
4280                         break;
4281                     case 0x27:  /* sra */
4282                         if (IS_IMM) { /* immediate */
4283                             simm = GET_FIELDs(insn, 20, 31);
4284                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4285                         } else { /* register */
4286                             cpu_tmp0 = get_temp_tl(dc);
4287                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4288                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4289                         }
4290                         gen_store_gpr(dc, rd, cpu_dst);
4291                         break;
4292 #endif
4293                     case 0x30:
4294                         {
4295                             cpu_tmp0 = get_temp_tl(dc);
4296                             switch(rd) {
4297                             case 0: /* wry */
4298                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4299                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4300                                 break;
4301 #ifndef TARGET_SPARC64
4302                             case 0x01 ... 0x0f: /* undefined in the
4303                                                    SPARCv8 manual, nop
4304                                                    on the microSPARC
4305                                                    II */
4306                             case 0x10 ... 0x1f: /* implementation-dependent
4307                                                    in the SPARCv8
4308                                                    manual, nop on the
4309                                                    microSPARC II */
4310                                 if ((rd == 0x13) && (dc->def->features &
4311                                                      CPU_FEATURE_POWERDOWN)) {
4312                                     /* LEON3 power-down */
4313                                     save_state(dc);
4314                                     gen_helper_power_down(cpu_env);
4315                                 }
4316                                 break;
4317 #else
4318                             case 0x2: /* V9 wrccr */
4319                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4320                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4321                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4322                                 dc->cc_op = CC_OP_FLAGS;
4323                                 break;
4324                             case 0x3: /* V9 wrasi */
4325                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4326                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4327                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4328                                                 offsetof(CPUSPARCState, asi));
4329                                 /* End TB to notice changed ASI.  */
4330                                 save_state(dc);
4331                                 gen_op_next_insn();
4332                                 tcg_gen_exit_tb(0);
4333                                 dc->base.is_jmp = DISAS_NORETURN;
4334                                 break;
4335                             case 0x6: /* V9 wrfprs */
4336                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4337                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4338                                 dc->fprs_dirty = 0;
4339                                 save_state(dc);
4340                                 gen_op_next_insn();
4341                                 tcg_gen_exit_tb(0);
4342                                 dc->base.is_jmp = DISAS_NORETURN;
4343                                 break;
4344                             case 0xf: /* V9 sir, nop if user */
4345 #if !defined(CONFIG_USER_ONLY)
4346                                 if (supervisor(dc)) {
4347                                     ; // XXX
4348                                 }
4349 #endif
4350                                 break;
4351                             case 0x13: /* Graphics Status */
4352                                 if (gen_trap_ifnofpu(dc)) {
4353                                     goto jmp_insn;
4354                                 }
4355                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4356                                 break;
4357                             case 0x14: /* Softint set */
                                 /* The three privileged SOFTINT flavours below
                                    each pass rs1 ^ rs2 ("wr" semantics) to a
                                    helper; the helper owns the softint state
                                    update and any resulting interrupt side
                                    effects (see helper implementations).  */
4358                                 if (!supervisor(dc))
4359                                     goto illegal_insn;
4360                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4361                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4362                                 break;
4363                             case 0x15: /* Softint clear */
4364                                 if (!supervisor(dc))
4365                                     goto illegal_insn;
4366                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4367                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4368                                 break;
4369                             case 0x16: /* Softint write */
4370                                 if (!supervisor(dc))
4371                                     goto illegal_insn;
4372                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4373                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4374                                 break;
4375                             case 0x17: /* Tick compare */
                                 /* Privileged timer registers (tick_cmpr,
                                    stick, stick_cmpr): the new value is
                                    rs1 ^ rs2, and a pointer to the timer is
                                    loaded from env so the helper can reprogram
                                    its limit/count.  */
4376 #if !defined(CONFIG_USER_ONLY)
4377                                 if (!supervisor(dc))
4378                                     goto illegal_insn;
4379 #endif
4380                                 {
4381                                     TCGv_ptr r_tickptr;
4382 
4383                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4384                                                    cpu_src2);
4385                                     r_tickptr = tcg_temp_new_ptr();
4386                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4387                                                    offsetof(CPUSPARCState, tick));
4388                                     gen_helper_tick_set_limit(r_tickptr,
4389                                                               cpu_tick_cmpr);
4390                                     tcg_temp_free_ptr(r_tickptr);
4391                                 }
4392                                 break;
4393                             case 0x18: /* System tick */
4394 #if !defined(CONFIG_USER_ONLY)
4395                                 if (!supervisor(dc))
4396                                     goto illegal_insn;
4397 #endif
4398                                 {
4399                                     TCGv_ptr r_tickptr;
4400 
                                     /* Writing %stick sets the counter value,
                                        not a compare limit.  */
4401                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4402                                                    cpu_src2);
4403                                     r_tickptr = tcg_temp_new_ptr();
4404                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4405                                                    offsetof(CPUSPARCState, stick));
4406                                     gen_helper_tick_set_count(r_tickptr,
4407                                                               cpu_tmp0);
4408                                     tcg_temp_free_ptr(r_tickptr);
4409                                 }
4410                                 break;
4411                             case 0x19: /* System tick compare */
4412 #if !defined(CONFIG_USER_ONLY)
4413                                 if (!supervisor(dc))
4414                                     goto illegal_insn;
4415 #endif
4416                                 {
4417                                     TCGv_ptr r_tickptr;
4418 
4419                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4420                                                    cpu_src2);
4421                                     r_tickptr = tcg_temp_new_ptr();
4422                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4423                                                    offsetof(CPUSPARCState, stick));
4424                                     gen_helper_tick_set_limit(r_tickptr,
4425                                                               cpu_stick_cmpr);
4426                                     tcg_temp_free_ptr(r_tickptr);
4427                                 }
4428                                 break;
4429 
4430                             case 0x10: /* Performance Control */
4431                             case 0x11: /* Performance Instrumentation
4432                                           Counter */
4433                             case 0x12: /* Dispatch Control */
4434 #endif
4435                             default:
4436                                 goto illegal_insn;
4437                             }
4438                         }
4439                         break;
4440 #if !defined(CONFIG_USER_ONLY)
4441                     case 0x31: /* wrpsr, V9 saved, restored */
                         /* V9 reuses this opcode for SAVED/RESTORED (and the
                            UA2005 window ops), selected by rd; pre-V9 it is
                            WRPSR with the value rs1 ^ rs2.  Privileged in
                            both cases.  */
4442                         {
4443                             if (!supervisor(dc))
4444                                 goto priv_insn;
4445 #ifdef TARGET_SPARC64
4446                             switch (rd) {
4447                             case 0:
4448                                 gen_helper_saved(cpu_env);
4449                                 break;
4450                             case 1:
4451                                 gen_helper_restored(cpu_env);
4452                                 break;
4453                             case 2: /* UA2005 allclean */
4454                             case 3: /* UA2005 otherw */
4455                             case 4: /* UA2005 normalw */
4456                             case 5: /* UA2005 invalw */
4457                                 // XXX
4458                             default:
4459                                 goto illegal_insn;
4460                             }
4461 #else
4462                             cpu_tmp0 = get_temp_tl(dc);
4463                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4464                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4465                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4466                             dc->cc_op = CC_OP_FLAGS;
                             /* PSR state (e.g. condition-code handling) is
                                consulted at translation time, so end this TB
                                and restart at the next insn.  */
4467                             save_state(dc);
4468                             gen_op_next_insn();
4469                             tcg_gen_exit_tb(0);
4470                             dc->base.is_jmp = DISAS_NORETURN;
4471 #endif
4472                         }
4473                         break;
4474                     case 0x32: /* wrwim, V9 wrpr */
                         /* Pre-V9: WRWIM.  V9: WRPR, where rd selects the
                            privileged register.  In both cases the value
                            written is rs1 ^ rs2 ("wr" semantics).  */
4475                         {
4476                             if (!supervisor(dc))
4477                                 goto priv_insn;
4478                             cpu_tmp0 = get_temp_tl(dc);
4479                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4480 #ifdef TARGET_SPARC64
4481                             switch (rd) {
4482                             case 0: // tpc
4483                                 {
4484                                     TCGv_ptr r_tsptr;
4485 
4486                                     r_tsptr = tcg_temp_new_ptr();
4487                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4488                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4489                                                   offsetof(trap_state, tpc));
4490                                     tcg_temp_free_ptr(r_tsptr);
4491                                 }
4492                                 break;
4493                             case 1: // tnpc
4494                                 {
4495                                     TCGv_ptr r_tsptr;
4496 
4497                                     r_tsptr = tcg_temp_new_ptr();
4498                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4499                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4500                                                   offsetof(trap_state, tnpc));
4501                                     tcg_temp_free_ptr(r_tsptr);
4502                                 }
4503                                 break;
4504                             case 2: // tstate
4505                                 {
4506                                     TCGv_ptr r_tsptr;
4507 
4508                                     r_tsptr = tcg_temp_new_ptr();
4509                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4510                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4511                                                   offsetof(trap_state,
4512                                                            tstate));
4513                                     tcg_temp_free_ptr(r_tsptr);
4514                                 }
4515                                 break;
4516                             case 3: // tt
4517                                 {
4518                                     TCGv_ptr r_tsptr;
4519 
4520                                     r_tsptr = tcg_temp_new_ptr();
4521                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                     /* tt is a 32-bit field; truncating
                                        store.  */
4522                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4523                                                     offsetof(trap_state, tt));
4524                                     tcg_temp_free_ptr(r_tsptr);
4525                                 }
4526                                 break;
4527                             case 4: // tick
4528                                 {
4529                                     TCGv_ptr r_tickptr;
4530 
4531                                     r_tickptr = tcg_temp_new_ptr();
4532                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4533                                                    offsetof(CPUSPARCState, tick));
4534                                     gen_helper_tick_set_count(r_tickptr,
4535                                                               cpu_tmp0);
4536                                     tcg_temp_free_ptr(r_tickptr);
4537                                 }
4538                                 break;
4539                             case 5: // tba
4540                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4541                                 break;
4542                             case 6: // pstate
                                 /* PSTATE changes can alter how following
                                    insns must be translated; save state and
                                    force a dynamic npc so the TB ends here.  */
4543                                 save_state(dc);
4544                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4545                                 dc->npc = DYNAMIC_PC;
4546                                 break;
4547                             case 7: // tl
                                 /* Changing the trap level likewise ends the
                                    TB via a dynamic npc.  */
4548                                 save_state(dc);
4549                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4550                                                offsetof(CPUSPARCState, tl));
4551                                 dc->npc = DYNAMIC_PC;
4552                                 break;
4553                             case 8: // pil
4554                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4555                                 break;
4556                             case 9: // cwp
4557                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4558                                 break;
4559                             case 10: // cansave
4560                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4561                                                 offsetof(CPUSPARCState,
4562                                                          cansave));
4563                                 break;
4564                             case 11: // canrestore
4565                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4566                                                 offsetof(CPUSPARCState,
4567                                                          canrestore));
4568                                 break;
4569                             case 12: // cleanwin
4570                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4571                                                 offsetof(CPUSPARCState,
4572                                                          cleanwin));
4573                                 break;
4574                             case 13: // otherwin
4575                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4576                                                 offsetof(CPUSPARCState,
4577                                                          otherwin));
4578                                 break;
4579                             case 14: // wstate
4580                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4581                                                 offsetof(CPUSPARCState,
4582                                                          wstate));
4583                                 break;
4584                             case 16: // UA2005 gl
4585                                 CHECK_IU_FEATURE(dc, GL);
4586                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4587                                 break;
4588                             case 26: // UA2005 strand status
                                 /* Hyperprivileged register: needs HYPV
                                    feature and hypervisor mode.  */
4589                                 CHECK_IU_FEATURE(dc, HYPV);
4590                                 if (!hypervisor(dc))
4591                                     goto priv_insn;
4592                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4593                                 break;
4594                             default:
4595                                 goto illegal_insn;
4596                             }
4597 #else
                             /* WRWIM: mask off window bits beyond the
                                implemented number of register windows.  */
4598                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4599                             if (dc->def->nwindows != 32) {
4600                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4601                                                 (1 << dc->def->nwindows) - 1);
4602                             }
4603 #endif
4604                         }
4605                         break;
4606                     case 0x33: /* wrtbr, UA2005 wrhpr */
                         /* Pre-V9: WRTBR (supervisor-only), TBR = rs1 ^ rs2.
                            SPARC64/UA2005: WRHPR, hypervisor-only; rd selects
                            the hyperprivileged register.  */
4607                         {
4608 #ifndef TARGET_SPARC64
4609                             if (!supervisor(dc))
4610                                 goto priv_insn;
4611                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4612 #else
4613                             CHECK_IU_FEATURE(dc, HYPV);
4614                             if (!hypervisor(dc))
4615                                 goto priv_insn;
4616                             cpu_tmp0 = get_temp_tl(dc);
4617                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4618                             switch (rd) {
4619                             case 0: // hpstate
4620                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4621                                                offsetof(CPUSPARCState,
4622                                                         hpstate));
                                 /* End the TB so the new hpstate is in
                                    effect for the following insns.  */
4623                                 save_state(dc);
4624                                 gen_op_next_insn();
4625                                 tcg_gen_exit_tb(0);
4626                                 dc->base.is_jmp = DISAS_NORETURN;
4627                                 break;
4628                             case 1: // htstate
4629                                 // XXX gen_op_wrhtstate();
4630                                 break;
4631                             case 3: // hintp
4632                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4633                                 break;
4634                             case 5: // htba
4635                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4636                                 break;
4637                             case 31: // hstick_cmpr
4638                                 {
4639                                     TCGv_ptr r_tickptr;
4640 
4641                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4642                                     r_tickptr = tcg_temp_new_ptr();
4643                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4644                                                    offsetof(CPUSPARCState, hstick));
4645                                     gen_helper_tick_set_limit(r_tickptr,
4646                                                               cpu_hstick_cmpr);
4647                                     tcg_temp_free_ptr(r_tickptr);
4648                                 }
4649                                 break;
4650                             case 6: // hver readonly
4651                             default:
4652                                 goto illegal_insn;
4653                             }
4654 #endif
4655                         }
4656                         break;
4657 #endif
4658 #ifdef TARGET_SPARC64
4659                     case 0x2c: /* V9 movcc */
                         /* Conditional register move: insn bit 18 selects
                            integer (set) vs floating-point (clear) condition
                            codes.  For the integer case, cc==0 is icc and
                            cc==2 is xcc; other values are illegal.  The move
                            is emitted as a movcond keeping the old rd value
                            when the condition is false.  */
4660                         {
4661                             int cc = GET_FIELD_SP(insn, 11, 12);
4662                             int cond = GET_FIELD_SP(insn, 14, 17);
4663                             DisasCompare cmp;
4664                             TCGv dst;
4665 
4666                             if (insn & (1 << 18)) {
4667                                 if (cc == 0) {
4668                                     gen_compare(&cmp, 0, cond, dc);
4669                                 } else if (cc == 2) {
4670                                     gen_compare(&cmp, 1, cond, dc);
4671                                 } else {
4672                                     goto illegal_insn;
4673                                 }
4674                             } else {
4675                                 gen_fcompare(&cmp, cc, cond);
4676                             }
4677 
4678                             /* The get_src2 above loaded the normal 13-bit
4679                                immediate field, not the 11-bit field we have
4680                                in movcc.  But it did handle the reg case.  */
4681                             if (IS_IMM) {
4682                                 simm = GET_FIELD_SPs(insn, 0, 10);
4683                                 tcg_gen_movi_tl(cpu_src2, simm);
4684                             }
4685 
4686                             dst = gen_load_gpr(dc, rd);
4687                             tcg_gen_movcond_tl(cmp.cond, dst,
4688                                                cmp.c1, cmp.c2,
4689                                                cpu_src2, dst);
4690                             free_compare(&cmp);
4691                             gen_store_gpr(dc, rd, dst);
4692                             break;
4693                         }
4694                     case 0x2d: /* V9 sdivx */
                         /* 64-bit signed divide; overflow/zero-divide
                            handling lives in the helper.  */
4695                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4696                         gen_store_gpr(dc, rd, cpu_dst);
4697                         break;
4698                     case 0x2e: /* V9 popc */
                         /* Population count; per the V9 definition only the
                            rs2 operand is used.  */
4699                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4700                         gen_store_gpr(dc, rd, cpu_dst);
4701                         break;
4702                     case 0x2f: /* V9 movr */
                         /* Conditional move on integer register: rs1 is
                            compared against zero, with the condition encoded
                            in bits 10-12.  As with movcc, a movcond keeps
                            the old rd value when the condition is false.  */
4703                         {
4704                             int cond = GET_FIELD_SP(insn, 10, 12);
4705                             DisasCompare cmp;
4706                             TCGv dst;
4707 
4708                             gen_compare_reg(&cmp, cond, cpu_src1);
4709 
4710                             /* The get_src2 above loaded the normal 13-bit
4711                                immediate field, not the 10-bit field we have
4712                                in movr.  But it did handle the reg case.  */
4713                             if (IS_IMM) {
4714                                 simm = GET_FIELD_SPs(insn, 0, 9);
4715                                 tcg_gen_movi_tl(cpu_src2, simm);
4716                             }
4717 
4718                             dst = gen_load_gpr(dc, rd);
4719                             tcg_gen_movcond_tl(cmp.cond, dst,
4720                                                cmp.c1, cmp.c2,
4721                                                cpu_src2, dst);
4722                             free_compare(&cmp);
4723                             gen_store_gpr(dc, rd, dst);
4724                             break;
4725                         }
4726 #endif
4727                     default:
4728                         goto illegal_insn;
4729                     }
4730                 }
4731             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4732 #ifdef TARGET_SPARC64
4733                 int opf = GET_FIELD_SP(insn, 5, 13);
4734                 rs1 = GET_FIELD(insn, 13, 17);
4735                 rs2 = GET_FIELD(insn, 27, 31);
4736                 if (gen_trap_ifnofpu(dc)) {
4737                     goto jmp_insn;
4738                 }
4739 
4740                 switch (opf) {
4741                 case 0x000: /* VIS I edge8cc */
4742                     CHECK_FPU_FEATURE(dc, VIS1);
4743                     cpu_src1 = gen_load_gpr(dc, rs1);
4744                     cpu_src2 = gen_load_gpr(dc, rs2);
4745                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4746                     gen_store_gpr(dc, rd, cpu_dst);
4747                     break;
4748                 case 0x001: /* VIS II edge8n */
4749                     CHECK_FPU_FEATURE(dc, VIS2);
4750                     cpu_src1 = gen_load_gpr(dc, rs1);
4751                     cpu_src2 = gen_load_gpr(dc, rs2);
4752                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4753                     gen_store_gpr(dc, rd, cpu_dst);
4754                     break;
4755                 case 0x002: /* VIS I edge8lcc */
4756                     CHECK_FPU_FEATURE(dc, VIS1);
4757                     cpu_src1 = gen_load_gpr(dc, rs1);
4758                     cpu_src2 = gen_load_gpr(dc, rs2);
4759                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4760                     gen_store_gpr(dc, rd, cpu_dst);
4761                     break;
4762                 case 0x003: /* VIS II edge8ln */
4763                     CHECK_FPU_FEATURE(dc, VIS2);
4764                     cpu_src1 = gen_load_gpr(dc, rs1);
4765                     cpu_src2 = gen_load_gpr(dc, rs2);
4766                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4767                     gen_store_gpr(dc, rd, cpu_dst);
4768                     break;
4769                 case 0x004: /* VIS I edge16cc */
4770                     CHECK_FPU_FEATURE(dc, VIS1);
4771                     cpu_src1 = gen_load_gpr(dc, rs1);
4772                     cpu_src2 = gen_load_gpr(dc, rs2);
4773                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4774                     gen_store_gpr(dc, rd, cpu_dst);
4775                     break;
4776                 case 0x005: /* VIS II edge16n */
4777                     CHECK_FPU_FEATURE(dc, VIS2);
4778                     cpu_src1 = gen_load_gpr(dc, rs1);
4779                     cpu_src2 = gen_load_gpr(dc, rs2);
4780                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4781                     gen_store_gpr(dc, rd, cpu_dst);
4782                     break;
4783                 case 0x006: /* VIS I edge16lcc */
4784                     CHECK_FPU_FEATURE(dc, VIS1);
4785                     cpu_src1 = gen_load_gpr(dc, rs1);
4786                     cpu_src2 = gen_load_gpr(dc, rs2);
4787                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4788                     gen_store_gpr(dc, rd, cpu_dst);
4789                     break;
4790                 case 0x007: /* VIS II edge16ln */
4791                     CHECK_FPU_FEATURE(dc, VIS2);
4792                     cpu_src1 = gen_load_gpr(dc, rs1);
4793                     cpu_src2 = gen_load_gpr(dc, rs2);
4794                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4795                     gen_store_gpr(dc, rd, cpu_dst);
4796                     break;
4797                 case 0x008: /* VIS I edge32cc */
4798                     CHECK_FPU_FEATURE(dc, VIS1);
4799                     cpu_src1 = gen_load_gpr(dc, rs1);
4800                     cpu_src2 = gen_load_gpr(dc, rs2);
4801                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4802                     gen_store_gpr(dc, rd, cpu_dst);
4803                     break;
4804                 case 0x009: /* VIS II edge32n */
4805                     CHECK_FPU_FEATURE(dc, VIS2);
4806                     cpu_src1 = gen_load_gpr(dc, rs1);
4807                     cpu_src2 = gen_load_gpr(dc, rs2);
4808                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4809                     gen_store_gpr(dc, rd, cpu_dst);
4810                     break;
4811                 case 0x00a: /* VIS I edge32lcc */
4812                     CHECK_FPU_FEATURE(dc, VIS1);
4813                     cpu_src1 = gen_load_gpr(dc, rs1);
4814                     cpu_src2 = gen_load_gpr(dc, rs2);
4815                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4816                     gen_store_gpr(dc, rd, cpu_dst);
4817                     break;
4818                 case 0x00b: /* VIS II edge32ln */
4819                     CHECK_FPU_FEATURE(dc, VIS2);
4820                     cpu_src1 = gen_load_gpr(dc, rs1);
4821                     cpu_src2 = gen_load_gpr(dc, rs2);
4822                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4823                     gen_store_gpr(dc, rd, cpu_dst);
4824                     break;
4825                 case 0x010: /* VIS I array8 */
4826                     CHECK_FPU_FEATURE(dc, VIS1);
4827                     cpu_src1 = gen_load_gpr(dc, rs1);
4828                     cpu_src2 = gen_load_gpr(dc, rs2);
4829                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4830                     gen_store_gpr(dc, rd, cpu_dst);
4831                     break;
4832                 case 0x012: /* VIS I array16 */
4833                     CHECK_FPU_FEATURE(dc, VIS1);
4834                     cpu_src1 = gen_load_gpr(dc, rs1);
4835                     cpu_src2 = gen_load_gpr(dc, rs2);
4836                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4837                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4838                     gen_store_gpr(dc, rd, cpu_dst);
4839                     break;
4840                 case 0x014: /* VIS I array32 */
4841                     CHECK_FPU_FEATURE(dc, VIS1);
4842                     cpu_src1 = gen_load_gpr(dc, rs1);
4843                     cpu_src2 = gen_load_gpr(dc, rs2);
4844                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4845                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4846                     gen_store_gpr(dc, rd, cpu_dst);
4847                     break;
4848                 case 0x018: /* VIS I alignaddr */
4849                     CHECK_FPU_FEATURE(dc, VIS1);
4850                     cpu_src1 = gen_load_gpr(dc, rs1);
4851                     cpu_src2 = gen_load_gpr(dc, rs2);
4852                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4853                     gen_store_gpr(dc, rd, cpu_dst);
4854                     break;
4855                 case 0x01a: /* VIS I alignaddrl */
4856                     CHECK_FPU_FEATURE(dc, VIS1);
4857                     cpu_src1 = gen_load_gpr(dc, rs1);
4858                     cpu_src2 = gen_load_gpr(dc, rs2);
4859                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4860                     gen_store_gpr(dc, rd, cpu_dst);
4861                     break;
4862                 case 0x019: /* VIS II bmask */
4863                     CHECK_FPU_FEATURE(dc, VIS2);
4864                     cpu_src1 = gen_load_gpr(dc, rs1);
4865                     cpu_src2 = gen_load_gpr(dc, rs2);
4866                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4867                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4868                     gen_store_gpr(dc, rd, cpu_dst);
4869                     break;
4870                 case 0x020: /* VIS I fcmple16 */
4871                     CHECK_FPU_FEATURE(dc, VIS1);
4872                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4873                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4874                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4875                     gen_store_gpr(dc, rd, cpu_dst);
4876                     break;
4877                 case 0x022: /* VIS I fcmpne16 */
4878                     CHECK_FPU_FEATURE(dc, VIS1);
4879                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4880                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4881                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4882                     gen_store_gpr(dc, rd, cpu_dst);
4883                     break;
4884                 case 0x024: /* VIS I fcmple32 */
4885                     CHECK_FPU_FEATURE(dc, VIS1);
4886                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4887                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4888                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4889                     gen_store_gpr(dc, rd, cpu_dst);
4890                     break;
4891                 case 0x026: /* VIS I fcmpne32 */
4892                     CHECK_FPU_FEATURE(dc, VIS1);
4893                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4894                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4895                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4896                     gen_store_gpr(dc, rd, cpu_dst);
4897                     break;
4898                 case 0x028: /* VIS I fcmpgt16 */
4899                     CHECK_FPU_FEATURE(dc, VIS1);
4900                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4901                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4902                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4903                     gen_store_gpr(dc, rd, cpu_dst);
4904                     break;
4905                 case 0x02a: /* VIS I fcmpeq16 */
4906                     CHECK_FPU_FEATURE(dc, VIS1);
4907                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4908                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4909                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4910                     gen_store_gpr(dc, rd, cpu_dst);
4911                     break;
4912                 case 0x02c: /* VIS I fcmpgt32 */
4913                     CHECK_FPU_FEATURE(dc, VIS1);
4914                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4915                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4916                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4917                     gen_store_gpr(dc, rd, cpu_dst);
4918                     break;
4919                 case 0x02e: /* VIS I fcmpeq32 */
4920                     CHECK_FPU_FEATURE(dc, VIS1);
4921                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4922                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4923                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4924                     gen_store_gpr(dc, rd, cpu_dst);
4925                     break;
4926                 case 0x031: /* VIS I fmul8x16 */
4927                     CHECK_FPU_FEATURE(dc, VIS1);
4928                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4929                     break;
4930                 case 0x033: /* VIS I fmul8x16au */
4931                     CHECK_FPU_FEATURE(dc, VIS1);
4932                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4933                     break;
4934                 case 0x035: /* VIS I fmul8x16al */
4935                     CHECK_FPU_FEATURE(dc, VIS1);
4936                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4937                     break;
4938                 case 0x036: /* VIS I fmul8sux16 */
4939                     CHECK_FPU_FEATURE(dc, VIS1);
4940                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4941                     break;
4942                 case 0x037: /* VIS I fmul8ulx16 */
4943                     CHECK_FPU_FEATURE(dc, VIS1);
4944                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4945                     break;
4946                 case 0x038: /* VIS I fmuld8sux16 */
4947                     CHECK_FPU_FEATURE(dc, VIS1);
4948                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4949                     break;
4950                 case 0x039: /* VIS I fmuld8ulx16 */
4951                     CHECK_FPU_FEATURE(dc, VIS1);
4952                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4953                     break;
4954                 case 0x03a: /* VIS I fpack32 */
4955                     CHECK_FPU_FEATURE(dc, VIS1);
4956                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4957                     break;
4958                 case 0x03b: /* VIS I fpack16 */
4959                     CHECK_FPU_FEATURE(dc, VIS1);
4960                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4961                     cpu_dst_32 = gen_dest_fpr_F(dc);
4962                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4963                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4964                     break;
4965                 case 0x03d: /* VIS I fpackfix */
4966                     CHECK_FPU_FEATURE(dc, VIS1);
4967                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4968                     cpu_dst_32 = gen_dest_fpr_F(dc);
4969                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4970                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4971                     break;
4972                 case 0x03e: /* VIS I pdist */
4973                     CHECK_FPU_FEATURE(dc, VIS1);
4974                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4975                     break;
4976                 case 0x048: /* VIS I faligndata */
4977                     CHECK_FPU_FEATURE(dc, VIS1);
4978                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4979                     break;
4980                 case 0x04b: /* VIS I fpmerge */
4981                     CHECK_FPU_FEATURE(dc, VIS1);
4982                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4983                     break;
4984                 case 0x04c: /* VIS II bshuffle */
4985                     CHECK_FPU_FEATURE(dc, VIS2);
4986                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4987                     break;
4988                 case 0x04d: /* VIS I fexpand */
4989                     CHECK_FPU_FEATURE(dc, VIS1);
4990                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4991                     break;
4992                 case 0x050: /* VIS I fpadd16 */
4993                     CHECK_FPU_FEATURE(dc, VIS1);
4994                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4995                     break;
4996                 case 0x051: /* VIS I fpadd16s */
4997                     CHECK_FPU_FEATURE(dc, VIS1);
4998                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4999                     break;
5000                 case 0x052: /* VIS I fpadd32 */
5001                     CHECK_FPU_FEATURE(dc, VIS1);
5002                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5003                     break;
5004                 case 0x053: /* VIS I fpadd32s */
5005                     CHECK_FPU_FEATURE(dc, VIS1);
5006                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5007                     break;
5008                 case 0x054: /* VIS I fpsub16 */
5009                     CHECK_FPU_FEATURE(dc, VIS1);
5010                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5011                     break;
5012                 case 0x055: /* VIS I fpsub16s */
5013                     CHECK_FPU_FEATURE(dc, VIS1);
5014                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5015                     break;
5016                 case 0x056: /* VIS I fpsub32 */
5017                     CHECK_FPU_FEATURE(dc, VIS1);
5018                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5019                     break;
5020                 case 0x057: /* VIS I fpsub32s */
5021                     CHECK_FPU_FEATURE(dc, VIS1);
5022                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5023                     break;
5024                 case 0x060: /* VIS I fzero */
5025                     CHECK_FPU_FEATURE(dc, VIS1);
5026                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5027                     tcg_gen_movi_i64(cpu_dst_64, 0);
5028                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5029                     break;
5030                 case 0x061: /* VIS I fzeros */
5031                     CHECK_FPU_FEATURE(dc, VIS1);
5032                     cpu_dst_32 = gen_dest_fpr_F(dc);
5033                     tcg_gen_movi_i32(cpu_dst_32, 0);
5034                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5035                     break;
5036                 case 0x062: /* VIS I fnor */
5037                     CHECK_FPU_FEATURE(dc, VIS1);
5038                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5039                     break;
5040                 case 0x063: /* VIS I fnors */
5041                     CHECK_FPU_FEATURE(dc, VIS1);
5042                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5043                     break;
5044                 case 0x064: /* VIS I fandnot2 */
5045                     CHECK_FPU_FEATURE(dc, VIS1);
5046                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5047                     break;
5048                 case 0x065: /* VIS I fandnot2s */
5049                     CHECK_FPU_FEATURE(dc, VIS1);
5050                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5051                     break;
5052                 case 0x066: /* VIS I fnot2 */
5053                     CHECK_FPU_FEATURE(dc, VIS1);
5054                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5055                     break;
5056                 case 0x067: /* VIS I fnot2s */
5057                     CHECK_FPU_FEATURE(dc, VIS1);
5058                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5059                     break;
5060                 case 0x068: /* VIS I fandnot1 */
5061                     CHECK_FPU_FEATURE(dc, VIS1);
5062                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5063                     break;
5064                 case 0x069: /* VIS I fandnot1s */
5065                     CHECK_FPU_FEATURE(dc, VIS1);
5066                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5067                     break;
5068                 case 0x06a: /* VIS I fnot1 */
5069                     CHECK_FPU_FEATURE(dc, VIS1);
5070                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5071                     break;
5072                 case 0x06b: /* VIS I fnot1s */
5073                     CHECK_FPU_FEATURE(dc, VIS1);
5074                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5075                     break;
5076                 case 0x06c: /* VIS I fxor */
5077                     CHECK_FPU_FEATURE(dc, VIS1);
5078                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5079                     break;
5080                 case 0x06d: /* VIS I fxors */
5081                     CHECK_FPU_FEATURE(dc, VIS1);
5082                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5083                     break;
5084                 case 0x06e: /* VIS I fnand */
5085                     CHECK_FPU_FEATURE(dc, VIS1);
5086                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5087                     break;
5088                 case 0x06f: /* VIS I fnands */
5089                     CHECK_FPU_FEATURE(dc, VIS1);
5090                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5091                     break;
5092                 case 0x070: /* VIS I fand */
5093                     CHECK_FPU_FEATURE(dc, VIS1);
5094                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5095                     break;
5096                 case 0x071: /* VIS I fands */
5097                     CHECK_FPU_FEATURE(dc, VIS1);
5098                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5099                     break;
5100                 case 0x072: /* VIS I fxnor */
5101                     CHECK_FPU_FEATURE(dc, VIS1);
5102                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5103                     break;
5104                 case 0x073: /* VIS I fxnors */
5105                     CHECK_FPU_FEATURE(dc, VIS1);
5106                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5107                     break;
5108                 case 0x074: /* VIS I fsrc1 */
5109                     CHECK_FPU_FEATURE(dc, VIS1);
5110                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5111                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5112                     break;
5113                 case 0x075: /* VIS I fsrc1s */
5114                     CHECK_FPU_FEATURE(dc, VIS1);
5115                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5116                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5117                     break;
5118                 case 0x076: /* VIS I fornot2 */
5119                     CHECK_FPU_FEATURE(dc, VIS1);
5120                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5121                     break;
5122                 case 0x077: /* VIS I fornot2s */
5123                     CHECK_FPU_FEATURE(dc, VIS1);
5124                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5125                     break;
5126                 case 0x078: /* VIS I fsrc2 */
5127                     CHECK_FPU_FEATURE(dc, VIS1);
5128                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5129                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5130                     break;
5131                 case 0x079: /* VIS I fsrc2s */
5132                     CHECK_FPU_FEATURE(dc, VIS1);
5133                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5134                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5135                     break;
5136                 case 0x07a: /* VIS I fornot1 */
5137                     CHECK_FPU_FEATURE(dc, VIS1);
5138                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5139                     break;
5140                 case 0x07b: /* VIS I fornot1s */
5141                     CHECK_FPU_FEATURE(dc, VIS1);
5142                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5143                     break;
5144                 case 0x07c: /* VIS I for */
5145                     CHECK_FPU_FEATURE(dc, VIS1);
5146                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5147                     break;
5148                 case 0x07d: /* VIS I fors */
5149                     CHECK_FPU_FEATURE(dc, VIS1);
5150                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5151                     break;
5152                 case 0x07e: /* VIS I fone */
5153                     CHECK_FPU_FEATURE(dc, VIS1);
5154                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5155                     tcg_gen_movi_i64(cpu_dst_64, -1);
5156                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5157                     break;
5158                 case 0x07f: /* VIS I fones */
5159                     CHECK_FPU_FEATURE(dc, VIS1);
5160                     cpu_dst_32 = gen_dest_fpr_F(dc);
5161                     tcg_gen_movi_i32(cpu_dst_32, -1);
5162                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5163                     break;
5164                 case 0x080: /* VIS I shutdown */
5165                 case 0x081: /* VIS II siam */
5166                     // XXX
5167                     goto illegal_insn;
5168                 default:
5169                     goto illegal_insn;
5170                 }
5171 #else
5172                 goto ncp_insn;
5173 #endif
5174             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5175 #ifdef TARGET_SPARC64
5176                 goto illegal_insn;
5177 #else
5178                 goto ncp_insn;
5179 #endif
5180 #ifdef TARGET_SPARC64
5181             } else if (xop == 0x39) { /* V9 return */
5182                 save_state(dc);
5183                 cpu_src1 = get_src1(dc, insn);
5184                 cpu_tmp0 = get_temp_tl(dc);
5185                 if (IS_IMM) {   /* immediate */
5186                     simm = GET_FIELDs(insn, 19, 31);
5187                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5188                 } else {                /* register */
5189                     rs2 = GET_FIELD(insn, 27, 31);
5190                     if (rs2) {
5191                         cpu_src2 = gen_load_gpr(dc, rs2);
5192                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5193                     } else {
5194                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5195                     }
5196                 }
5197                 gen_helper_restore(cpu_env);
5198                 gen_mov_pc_npc(dc);
5199                 gen_check_align(cpu_tmp0, 3);
5200                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5201                 dc->npc = DYNAMIC_PC;
5202                 goto jmp_insn;
5203 #endif
5204             } else {
5205                 cpu_src1 = get_src1(dc, insn);
5206                 cpu_tmp0 = get_temp_tl(dc);
5207                 if (IS_IMM) {   /* immediate */
5208                     simm = GET_FIELDs(insn, 19, 31);
5209                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5210                 } else {                /* register */
5211                     rs2 = GET_FIELD(insn, 27, 31);
5212                     if (rs2) {
5213                         cpu_src2 = gen_load_gpr(dc, rs2);
5214                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5215                     } else {
5216                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5217                     }
5218                 }
5219                 switch (xop) {
5220                 case 0x38:      /* jmpl */
5221                     {
5222                         TCGv t = gen_dest_gpr(dc, rd);
5223                         tcg_gen_movi_tl(t, dc->pc);
5224                         gen_store_gpr(dc, rd, t);
5225 
5226                         gen_mov_pc_npc(dc);
5227                         gen_check_align(cpu_tmp0, 3);
5228                         gen_address_mask(dc, cpu_tmp0);
5229                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5230                         dc->npc = DYNAMIC_PC;
5231                     }
5232                     goto jmp_insn;
5233 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5234                 case 0x39:      /* rett, V9 return */
5235                     {
5236                         if (!supervisor(dc))
5237                             goto priv_insn;
5238                         gen_mov_pc_npc(dc);
5239                         gen_check_align(cpu_tmp0, 3);
5240                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5241                         dc->npc = DYNAMIC_PC;
5242                         gen_helper_rett(cpu_env);
5243                     }
5244                     goto jmp_insn;
5245 #endif
5246                 case 0x3b: /* flush */
5247                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5248                         goto unimp_flush;
5249                     /* nop */
5250                     break;
5251                 case 0x3c:      /* save */
5252                     gen_helper_save(cpu_env);
5253                     gen_store_gpr(dc, rd, cpu_tmp0);
5254                     break;
5255                 case 0x3d:      /* restore */
5256                     gen_helper_restore(cpu_env);
5257                     gen_store_gpr(dc, rd, cpu_tmp0);
5258                     break;
5259 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5260                 case 0x3e:      /* V9 done/retry */
5261                     {
5262                         switch (rd) {
5263                         case 0:
5264                             if (!supervisor(dc))
5265                                 goto priv_insn;
5266                             dc->npc = DYNAMIC_PC;
5267                             dc->pc = DYNAMIC_PC;
5268                             gen_helper_done(cpu_env);
5269                             goto jmp_insn;
5270                         case 1:
5271                             if (!supervisor(dc))
5272                                 goto priv_insn;
5273                             dc->npc = DYNAMIC_PC;
5274                             dc->pc = DYNAMIC_PC;
5275                             gen_helper_retry(cpu_env);
5276                             goto jmp_insn;
5277                         default:
5278                             goto illegal_insn;
5279                         }
5280                     }
5281                     break;
5282 #endif
5283                 default:
5284                     goto illegal_insn;
5285                 }
5286             }
5287             break;
5288         }
5289         break;
5290     case 3:                     /* load/store instructions */
5291         {
5292             unsigned int xop = GET_FIELD(insn, 7, 12);
5293             /* ??? gen_address_mask prevents us from using a source
5294                register directly.  Always generate a temporary.  */
5295             TCGv cpu_addr = get_temp_tl(dc);
5296 
5297             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5298             if (xop == 0x3c || xop == 0x3e) {
5299                 /* V9 casa/casxa : no offset */
5300             } else if (IS_IMM) {     /* immediate */
5301                 simm = GET_FIELDs(insn, 19, 31);
5302                 if (simm != 0) {
5303                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5304                 }
5305             } else {            /* register */
5306                 rs2 = GET_FIELD(insn, 27, 31);
5307                 if (rs2 != 0) {
5308                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5309                 }
5310             }
5311             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5312                 (xop > 0x17 && xop <= 0x1d ) ||
5313                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5314                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5315 
5316                 switch (xop) {
5317                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5318                     gen_address_mask(dc, cpu_addr);
5319                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5320                     break;
5321                 case 0x1:       /* ldub, load unsigned byte */
5322                     gen_address_mask(dc, cpu_addr);
5323                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5324                     break;
5325                 case 0x2:       /* lduh, load unsigned halfword */
5326                     gen_address_mask(dc, cpu_addr);
5327                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5328                     break;
                case 0x3:       /* ldd, load double word */
                    /* rd must be even: ldd targets an even/odd GPR pair.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        /* One atomic-width 64-bit load, then split into the
                           two 32-bit register halves.  */
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
                        /* Low 32 bits go to the odd register rd + 1.  */
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        /* High 32 bits are left in cpu_val; the common
                           gen_store_gpr(dc, rd, cpu_val) after this switch
                           writes them to the even register rd.  */
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_temp_free_i64(t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
5347                 case 0x9:       /* ldsb, load signed byte */
5348                     gen_address_mask(dc, cpu_addr);
5349                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5350                     break;
5351                 case 0xa:       /* ldsh, load signed halfword */
5352                     gen_address_mask(dc, cpu_addr);
5353                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5354                     break;
5355                 case 0xd:       /* ldstub */
5356                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5357                     break;
5358                 case 0x0f:
5359                     /* swap, swap register with memory. Also atomically */
5360                     CHECK_IU_FEATURE(dc, SWAP);
5361                     cpu_src1 = gen_load_gpr(dc, rd);
5362                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5363                              dc->mem_idx, MO_TEUL);
5364                     break;
5365 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5366                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5367                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5368                     break;
5369                 case 0x11:      /* lduba, load unsigned byte alternate */
5370                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5371                     break;
5372                 case 0x12:      /* lduha, load unsigned halfword alternate */
5373                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5374                     break;
5375                 case 0x13:      /* ldda, load double word alternate */
5376                     if (rd & 1) {
5377                         goto illegal_insn;
5378                     }
5379                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5380                     goto skip_move;
5381                 case 0x19:      /* ldsba, load signed byte alternate */
5382                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5383                     break;
5384                 case 0x1a:      /* ldsha, load signed halfword alternate */
5385                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5386                     break;
5387                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5388                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5389                     break;
5390                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5391                                    atomically */
5392                     CHECK_IU_FEATURE(dc, SWAP);
5393                     cpu_src1 = gen_load_gpr(dc, rd);
5394                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5395                     break;
5396 
5397 #ifndef TARGET_SPARC64
5398                 case 0x30: /* ldc */
5399                 case 0x31: /* ldcsr */
5400                 case 0x33: /* lddc */
5401                     goto ncp_insn;
5402 #endif
5403 #endif
5404 #ifdef TARGET_SPARC64
5405                 case 0x08: /* V9 ldsw */
5406                     gen_address_mask(dc, cpu_addr);
5407                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5408                     break;
5409                 case 0x0b: /* V9 ldx */
5410                     gen_address_mask(dc, cpu_addr);
5411                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5412                     break;
5413                 case 0x18: /* V9 ldswa */
5414                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5415                     break;
5416                 case 0x1b: /* V9 ldxa */
5417                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5418                     break;
5419                 case 0x2d: /* V9 prefetch, no effect */
5420                     goto skip_move;
5421                 case 0x30: /* V9 ldfa */
5422                     if (gen_trap_ifnofpu(dc)) {
5423                         goto jmp_insn;
5424                     }
5425                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5426                     gen_update_fprs_dirty(dc, rd);
5427                     goto skip_move;
5428                 case 0x33: /* V9 lddfa */
5429                     if (gen_trap_ifnofpu(dc)) {
5430                         goto jmp_insn;
5431                     }
5432                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5433                     gen_update_fprs_dirty(dc, DFPREG(rd));
5434                     goto skip_move;
5435                 case 0x3d: /* V9 prefetcha, no effect */
5436                     goto skip_move;
5437                 case 0x32: /* V9 ldqfa */
5438                     CHECK_FPU_FEATURE(dc, FLOAT128);
5439                     if (gen_trap_ifnofpu(dc)) {
5440                         goto jmp_insn;
5441                     }
5442                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5443                     gen_update_fprs_dirty(dc, QFPREG(rd));
5444                     goto skip_move;
5445 #endif
5446                 default:
5447                     goto illegal_insn;
5448                 }
5449                 gen_store_gpr(dc, rd, cpu_val);
5450 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5451             skip_move: ;
5452 #endif
5453             } else if (xop >= 0x20 && xop < 0x24) {
5454                 if (gen_trap_ifnofpu(dc)) {
5455                     goto jmp_insn;
5456                 }
5457                 switch (xop) {
5458                 case 0x20:      /* ldf, load fpreg */
5459                     gen_address_mask(dc, cpu_addr);
5460                     cpu_dst_32 = gen_dest_fpr_F(dc);
5461                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5462                                         dc->mem_idx, MO_TEUL);
5463                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5464                     break;
5465                 case 0x21:      /* ldfsr, V9 ldxfsr */
5466 #ifdef TARGET_SPARC64
5467                     gen_address_mask(dc, cpu_addr);
5468                     if (rd == 1) {
5469                         TCGv_i64 t64 = tcg_temp_new_i64();
5470                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5471                                             dc->mem_idx, MO_TEQ);
5472                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5473                         tcg_temp_free_i64(t64);
5474                         break;
5475                     }
5476 #endif
5477                     cpu_dst_32 = get_temp_i32(dc);
5478                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5479                                         dc->mem_idx, MO_TEUL);
5480                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5481                     break;
5482                 case 0x22:      /* ldqf, load quad fpreg */
5483                     CHECK_FPU_FEATURE(dc, FLOAT128);
5484                     gen_address_mask(dc, cpu_addr);
5485                     cpu_src1_64 = tcg_temp_new_i64();
5486                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5487                                         MO_TEQ | MO_ALIGN_4);
5488                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5489                     cpu_src2_64 = tcg_temp_new_i64();
5490                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5491                                         MO_TEQ | MO_ALIGN_4);
5492                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5493                     tcg_temp_free_i64(cpu_src1_64);
5494                     tcg_temp_free_i64(cpu_src2_64);
5495                     break;
5496                 case 0x23:      /* lddf, load double fpreg */
5497                     gen_address_mask(dc, cpu_addr);
5498                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5499                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5500                                         MO_TEQ | MO_ALIGN_4);
5501                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5502                     break;
5503                 default:
5504                     goto illegal_insn;
5505                 }
5506             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5507                        xop == 0xe || xop == 0x1e) {
5508                 TCGv cpu_val = gen_load_gpr(dc, rd);
5509 
5510                 switch (xop) {
5511                 case 0x4: /* st, store word */
5512                     gen_address_mask(dc, cpu_addr);
5513                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5514                     break;
5515                 case 0x5: /* stb, store byte */
5516                     gen_address_mask(dc, cpu_addr);
5517                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5518                     break;
5519                 case 0x6: /* sth, store halfword */
5520                     gen_address_mask(dc, cpu_addr);
5521                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5522                     break;
                case 0x7: /* std, store double word */
                    /* rd must be even: std sources an even/odd GPR pair.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        /* Build the 64-bit value as high:low =
                           rd:rd+1 (cpu_val already holds rd) and emit a
                           single 64-bit store.  */
                        lo = gen_load_gpr(dc, rd + 1);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
                        tcg_temp_free_i64(t64);
                    }
                    break;
5538 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5539                 case 0x14: /* sta, V9 stwa, store word alternate */
5540                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5541                     break;
5542                 case 0x15: /* stba, store byte alternate */
5543                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5544                     break;
5545                 case 0x16: /* stha, store halfword alternate */
5546                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5547                     break;
5548                 case 0x17: /* stda, store double word alternate */
5549                     if (rd & 1) {
5550                         goto illegal_insn;
5551                     }
5552                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5553                     break;
5554 #endif
5555 #ifdef TARGET_SPARC64
5556                 case 0x0e: /* V9 stx */
5557                     gen_address_mask(dc, cpu_addr);
5558                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5559                     break;
5560                 case 0x1e: /* V9 stxa */
5561                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5562                     break;
5563 #endif
5564                 default:
5565                     goto illegal_insn;
5566                 }
5567             } else if (xop > 0x23 && xop < 0x28) {
5568                 if (gen_trap_ifnofpu(dc)) {
5569                     goto jmp_insn;
5570                 }
5571                 switch (xop) {
5572                 case 0x24: /* stf, store fpreg */
5573                     gen_address_mask(dc, cpu_addr);
5574                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5575                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5576                                         dc->mem_idx, MO_TEUL);
5577                     break;
5578                 case 0x25: /* stfsr, V9 stxfsr */
5579                     {
5580 #ifdef TARGET_SPARC64
5581                         gen_address_mask(dc, cpu_addr);
5582                         if (rd == 1) {
5583                             tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5584                             break;
5585                         }
5586 #endif
5587                         tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5588                     }
5589                     break;
5590                 case 0x26:
5591 #ifdef TARGET_SPARC64
5592                     /* V9 stqf, store quad fpreg */
5593                     CHECK_FPU_FEATURE(dc, FLOAT128);
5594                     gen_address_mask(dc, cpu_addr);
5595                     /* ??? While stqf only requires 4-byte alignment, it is
5596                        legal for the cpu to signal the unaligned exception.
5597                        The OS trap handler is then required to fix it up.
5598                        For qemu, this avoids having to probe the second page
5599                        before performing the first write.  */
5600                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5601                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5602                                         dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5603                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5604                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5605                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5606                                         dc->mem_idx, MO_TEQ);
5607                     break;
5608 #else /* !TARGET_SPARC64 */
5609                     /* stdfq, store floating point queue */
5610 #if defined(CONFIG_USER_ONLY)
5611                     goto illegal_insn;
5612 #else
5613                     if (!supervisor(dc))
5614                         goto priv_insn;
5615                     if (gen_trap_ifnofpu(dc)) {
5616                         goto jmp_insn;
5617                     }
5618                     goto nfq_insn;
5619 #endif
5620 #endif
5621                 case 0x27: /* stdf, store double fpreg */
5622                     gen_address_mask(dc, cpu_addr);
5623                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5624                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5625                                         MO_TEQ | MO_ALIGN_4);
5626                     break;
5627                 default:
5628                     goto illegal_insn;
5629                 }
5630             } else if (xop > 0x33 && xop < 0x3f) {
5631                 switch (xop) {
5632 #ifdef TARGET_SPARC64
5633                 case 0x34: /* V9 stfa */
5634                     if (gen_trap_ifnofpu(dc)) {
5635                         goto jmp_insn;
5636                     }
5637                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5638                     break;
5639                 case 0x36: /* V9 stqfa */
5640                     {
5641                         CHECK_FPU_FEATURE(dc, FLOAT128);
5642                         if (gen_trap_ifnofpu(dc)) {
5643                             goto jmp_insn;
5644                         }
5645                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5646                     }
5647                     break;
5648                 case 0x37: /* V9 stdfa */
5649                     if (gen_trap_ifnofpu(dc)) {
5650                         goto jmp_insn;
5651                     }
5652                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5653                     break;
5654                 case 0x3e: /* V9 casxa */
5655                     rs2 = GET_FIELD(insn, 27, 31);
5656                     cpu_src2 = gen_load_gpr(dc, rs2);
5657                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5658                     break;
5659 #else
5660                 case 0x34: /* stc */
5661                 case 0x35: /* stcsr */
5662                 case 0x36: /* stdcq */
5663                 case 0x37: /* stdc */
5664                     goto ncp_insn;
5665 #endif
5666 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5667                 case 0x3c: /* V9 or LEON3 casa */
5668 #ifndef TARGET_SPARC64
5669                     CHECK_IU_FEATURE(dc, CASA);
5670 #endif
5671                     rs2 = GET_FIELD(insn, 27, 31);
5672                     cpu_src2 = gen_load_gpr(dc, rs2);
5673                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5674                     break;
5675 #endif
5676                 default:
5677                     goto illegal_insn;
5678                 }
5679             } else {
5680                 goto illegal_insn;
5681             }
5682         }
5683         break;
5684     }
5685     /* default case for non jump instructions */
5686     if (dc->npc == DYNAMIC_PC) {
5687         dc->pc = DYNAMIC_PC;
5688         gen_op_next_insn();
5689     } else if (dc->npc == JUMP_PC) {
5690         /* we can do a static jump */
5691         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5692         dc->base.is_jmp = DISAS_NORETURN;
5693     } else {
5694         dc->pc = dc->npc;
5695         dc->npc = dc->npc + 4;
5696     }
5697  jmp_insn:
5698     goto egress;
5699  illegal_insn:
5700     gen_exception(dc, TT_ILL_INSN);
5701     goto egress;
5702  unimp_flush:
5703     gen_exception(dc, TT_UNIMP_FLUSH);
5704     goto egress;
5705 #if !defined(CONFIG_USER_ONLY)
5706  priv_insn:
5707     gen_exception(dc, TT_PRIV_INSN);
5708     goto egress;
5709 #endif
5710  nfpu_insn:
5711     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5712     goto egress;
5713 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5714  nfq_insn:
5715     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5716     goto egress;
5717 #endif
5718 #ifndef TARGET_SPARC64
5719  ncp_insn:
5720     gen_exception(dc, TT_NCP_INSN);
5721     goto egress;
5722 #endif
5723  egress:
5724     if (dc->n_t32 != 0) {
5725         int i;
5726         for (i = dc->n_t32 - 1; i >= 0; --i) {
5727             tcg_temp_free_i32(dc->t32[i]);
5728         }
5729         dc->n_t32 = 0;
5730     }
5731     if (dc->n_ttl != 0) {
5732         int i;
5733         for (i = dc->n_ttl - 1; i >= 0; --i) {
5734             tcg_temp_free(dc->ttl[i]);
5735         }
5736         dc->n_ttl = 0;
5737     }
5738 }
5739 
/*
 * Translator hook: initialize the DisasContext for a new TB by decoding
 * the fields packed into the TB's flags and cs_base.
 */
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cs->env_ptr;
    int bound;

    dc->pc = dc->base.pc_first;
    /* SPARC has delayed branches, so the TB's cs_base carries the npc.  */
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition codes are unknown at TB entry; resolve them dynamically.  */
    dc->cc_op = CC_OP_DYNAMIC;
    /* The MMU index lives in the low bits of the TB flags.  */
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    /* Cached ASI byte from the TB flags, used by alternate-space insns.  */
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif
    /*
     * if we reach a page boundary, we stop generation so that the
     * PC of a TT_TFAULT exception is always in the right page
     */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5770 
/* Translator hook: SPARC needs no extra work at the start of a TB.  */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5774 
/*
 * Translator hook: record (pc, npc) for this insn so restore_state_to_opc()
 * can rebuild the CPU state when an exception unwinds mid-TB.
 *
 * When npc carries the JUMP_PC marker, the real npc is one of two statically
 * known values chosen at runtime by 'cond': we encode the taken target with
 * the JUMP_PC tag in its low bits, and the not-taken target is implicitly
 * pc + 4 (asserted below to match jump_pc[1]).
 */
static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    if (dc->npc & JUMP_PC) {
        assert(dc->jump_pc[1] == dc->pc + 4);
        tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
    } else {
        tcg_gen_insn_start(dc->pc, dc->npc);
    }
}
5786 
/*
 * Translator hook: emit code that raises a debug exception when a
 * breakpoint is hit.  Always returns true so the translator ends the
 * TB at the breakpoint.
 */
static bool sparc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                      const CPUBreakpoint *bp)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    /* NOTE(review): presumably save_state() flushes pc/npc (and related
       state) to env; it is skipped for the first insn of the TB where the
       stored state is still current — confirm against save_state().  */
    if (dc->pc != dc->base.pc_first) {
        save_state(dc);
    }
    gen_helper_debug(cpu_env);
    /* Return to the main loop rather than chaining to another TB.  */
    tcg_gen_exit_tb(0);
    dc->base.is_jmp = DISAS_NORETURN;
    /* update pc_next so that the current instruction is included in tb->size */
    dc->base.pc_next += 4;
    return true;
}
5802 
5803 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5804 {
5805     DisasContext *dc = container_of(dcbase, DisasContext, base);
5806     CPUSPARCState *env = cs->env_ptr;
5807     unsigned int insn;
5808 
5809     insn = cpu_ldl_code(env, dc->pc);
5810     dc->base.pc_next += 4;
5811     disas_sparc_insn(dc, insn);
5812 
5813     if (dc->base.is_jmp == DISAS_NORETURN) {
5814         return;
5815     }
5816     if (dc->pc != dc->base.pc_next) {
5817         dc->base.is_jmp = DISAS_TOO_MANY;
5818     }
5819 }
5820 
5821 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5822 {
5823     DisasContext *dc = container_of(dcbase, DisasContext, base);
5824 
5825     if (dc->base.is_jmp != DISAS_NORETURN) {
5826         if (dc->pc != DYNAMIC_PC &&
5827             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5828             /* static PC and NPC: we can use direct chaining */
5829             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5830         } else {
5831             if (dc->pc != DYNAMIC_PC) {
5832                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5833             }
5834             save_npc(dc);
5835             tcg_gen_exit_tb(0);
5836         }
5837     }
5838 }
5839 
5840 static void sparc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
5841 {
5842     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
5843     log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
5844 }
5845 
/* Hook table driving the generic translator_loop for SPARC.  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .breakpoint_check   = sparc_tr_breakpoint_check,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5855 
/* Entry point: translate one guest TB into TCG ops via the generic loop.  */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
{
    DisasContext dc = {};

    translator_loop(&sparc_tr_ops, &dc.base, cs, tb);
}
5862 
/*
 * One-time TCG setup: create the TCG globals that mirror CPUSPARCState
 * fields (condition codes, pc/npc, windowed registers, FP registers, and
 * the various SPARC64-only control registers).
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Only even float regs are named: each global is a 64-bit pair.  */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals: (TCGv_i32 slot, env offset, debug name).  */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, same layout as r32 above.  */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* Pointer to the current register window, used below for %o/%l/%i.  */
    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 always reads as zero on SPARC, so it gets no backing global.  */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed regs (%o, %l, %i) live behind regwptr, not directly in env.  */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5948 
5949 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5950                           target_ulong *data)
5951 {
5952     target_ulong pc = data[0];
5953     target_ulong npc = data[1];
5954 
5955     env->pc = pc;
5956     if (npc == DYNAMIC_PC) {
5957         /* dynamic NPC: already stored */
5958     } else if (npc & JUMP_PC) {
5959         /* jump PC: use 'cond' and the jump targets of the translation */
5960         if (env->cond) {
5961             env->npc = npc & ~3;
5962         } else {
5963             env->npc = pc + 4;
5964         }
5965     } else {
5966         env->npc = npc;
5967     }
5968 }
5969