xref: /qemu/target/ppc/translate.c (revision b2a3cbb8)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
/* Single-step state bits tracked in DisasContext.singlestep_enabled. */
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 
47 /* Include definitions for instructions classes and implementations flags */
48 /* #define PPC_DEBUG_DISAS */
49 
/* LOG_DISAS() compiles to nothing unless PPC_DEBUG_DISAS is enabled above. */
50 #ifdef PPC_DEBUG_DISAS
51 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
52 #else
53 #  define LOG_DISAS(...) do { } while (0)
54 #endif
55 /*****************************************************************************/
56 /* Code translation helpers                                                  */
57 
58 /* global register indexes */
/* Backing storage for the register-name strings registered with TCG below. */
59 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
60                           + 10 * 4 + 22 * 5 /* SPE GPRh */
61                           + 8 * 5           /* CRF */];
/* TCG globals mirroring CPUPPCState fields; created in ppc_translate_init(). */
62 static TCGv cpu_gpr[32];
63 static TCGv cpu_gprh[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_msr;
67 static TCGv cpu_ctr;
68 static TCGv cpu_lr;
69 #if defined(TARGET_PPC64)
70 static TCGv cpu_cfar;
71 #endif
/* XER and its individually-tracked flag bits (SO/OV/CA and ISA 3.00 *32). */
72 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
73 static TCGv cpu_reserve;
74 static TCGv cpu_reserve_val;
75 static TCGv cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
77 
78 #include "exec/gen-icount.h"
79 
80 void ppc_translate_init(void)
81 {
82     int i;
83     char *p;
84     size_t cpu_reg_names_size;
85 
86     p = cpu_reg_names;
87     cpu_reg_names_size = sizeof(cpu_reg_names);
88 
89     for (i = 0; i < 8; i++) {
90         snprintf(p, cpu_reg_names_size, "crf%d", i);
91         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
92                                             offsetof(CPUPPCState, crf[i]), p);
93         p += 5;
94         cpu_reg_names_size -= 5;
95     }
96 
97     for (i = 0; i < 32; i++) {
98         snprintf(p, cpu_reg_names_size, "r%d", i);
99         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
100                                         offsetof(CPUPPCState, gpr[i]), p);
101         p += (i < 10) ? 3 : 4;
102         cpu_reg_names_size -= (i < 10) ? 3 : 4;
103         snprintf(p, cpu_reg_names_size, "r%dH", i);
104         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
105                                          offsetof(CPUPPCState, gprh[i]), p);
106         p += (i < 10) ? 4 : 5;
107         cpu_reg_names_size -= (i < 10) ? 4 : 5;
108     }
109 
110     cpu_nip = tcg_global_mem_new(cpu_env,
111                                  offsetof(CPUPPCState, nip), "nip");
112 
113     cpu_msr = tcg_global_mem_new(cpu_env,
114                                  offsetof(CPUPPCState, msr), "msr");
115 
116     cpu_ctr = tcg_global_mem_new(cpu_env,
117                                  offsetof(CPUPPCState, ctr), "ctr");
118 
119     cpu_lr = tcg_global_mem_new(cpu_env,
120                                 offsetof(CPUPPCState, lr), "lr");
121 
122 #if defined(TARGET_PPC64)
123     cpu_cfar = tcg_global_mem_new(cpu_env,
124                                   offsetof(CPUPPCState, cfar), "cfar");
125 #endif
126 
127     cpu_xer = tcg_global_mem_new(cpu_env,
128                                  offsetof(CPUPPCState, xer), "xer");
129     cpu_so = tcg_global_mem_new(cpu_env,
130                                 offsetof(CPUPPCState, so), "SO");
131     cpu_ov = tcg_global_mem_new(cpu_env,
132                                 offsetof(CPUPPCState, ov), "OV");
133     cpu_ca = tcg_global_mem_new(cpu_env,
134                                 offsetof(CPUPPCState, ca), "CA");
135     cpu_ov32 = tcg_global_mem_new(cpu_env,
136                                   offsetof(CPUPPCState, ov32), "OV32");
137     cpu_ca32 = tcg_global_mem_new(cpu_env,
138                                   offsetof(CPUPPCState, ca32), "CA32");
139 
140     cpu_reserve = tcg_global_mem_new(cpu_env,
141                                      offsetof(CPUPPCState, reserve_addr),
142                                      "reserve_addr");
143     cpu_reserve_val = tcg_global_mem_new(cpu_env,
144                                      offsetof(CPUPPCState, reserve_val),
145                                      "reserve_val");
146 
147     cpu_fpscr = tcg_global_mem_new(cpu_env,
148                                    offsetof(CPUPPCState, fpscr), "fpscr");
149 
150     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
151                                              offsetof(CPUPPCState, access_type),
152                                              "access_type");
153 }
154 
155 /* internal defines */
/* Per-translation-block decode state threaded through all gen_* helpers. */
156 struct DisasContext {
157     DisasContextBase base;
158     target_ulong cia;  /* current instruction address */
159     uint32_t opcode;
160     /* Routine used to access memory */
161     bool pr, hv, dr, le_mode;
162     bool lazy_tlb_flush;
163     bool need_access_type;
164     int mem_idx;
165     int access_type;
166     /* Translation flags */
167     MemOp default_tcg_memop_mask;
168 #if defined(TARGET_PPC64)
169     bool sf_mode;
170     bool has_cfar;
171 #endif
/* Facility-available bits checked before emitting FP/VMX/VSX/SPE/TM ops. */
172     bool fpu_enabled;
173     bool altivec_enabled;
174     bool vsx_enabled;
175     bool spe_enabled;
176     bool tm_enabled;
177     bool gtse;
178     bool hr;
/* PMU-related MMCR0 state; see power8-pmu.h for the source of these bits. */
179     bool mmcr0_pmcc0;
180     bool mmcr0_pmcc1;
181     bool mmcr0_pmcjce;
182     bool pmc_other;
183     bool pmu_insn_cnt;
184     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
185     int singlestep_enabled;
186     uint32_t flags;
187     uint64_t insns_flags;
188     uint64_t insns_flags2;
189 };
190 
/* Extra DisasJumpType values used by this translator. */
191 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
192 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
193 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
194 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
195 
196 /* Return true iff byteswap is needed in a scalar memop */
/* A swap is needed whenever guest mode and host target order disagree. */
197 static inline bool need_byteswap(const DisasContext *ctx)
198 {
199 #if TARGET_BIG_ENDIAN
200      return ctx->le_mode;
201 #else
202      return !ctx->le_mode;
203 #endif
204 }
205 
206 /* True when active word size < size of target_long.  */
207 #ifdef TARGET_PPC64
208 # define NARROW_MODE(C)  (!(C)->sf_mode)
209 #else
210 # define NARROW_MODE(C)  0
211 #endif
212 
/* Per-opcode decode table entry; inval1/inval2 are reserved-bit masks. */
213 struct opc_handler_t {
214     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
215     uint32_t inval1;
216     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
217     uint32_t inval2;
218     /* instruction type */
219     uint64_t type;
220     /* extended instruction type */
221     uint64_t type2;
222     /* handler */
223     void (*handler)(DisasContext *ctx);
224 };
225 
226 /* SPR load/store helpers */
/* Emit a load of env->spr[reg] into t. */
227 static inline void gen_load_spr(TCGv t, int reg)
228 {
229     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
230 }
231 
/* Emit a store of t into env->spr[reg]. */
232 static inline void gen_store_spr(int reg, TCGv t)
233 {
234     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
235 }
236 
237 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
238 {
239     if (ctx->need_access_type && ctx->access_type != access_type) {
240         tcg_gen_movi_i32(cpu_access_type, access_type);
241         ctx->access_type = access_type;
242     }
243 }
244 
245 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
246 {
247     if (NARROW_MODE(ctx)) {
248         nip = (uint32_t)nip;
249     }
250     tcg_gen_movi_tl(cpu_nip, nip);
251 }
252 
253 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
254 {
255     TCGv_i32 t0, t1;
256 
257     /*
258      * These are all synchronous exceptions, we set the PC back to the
259      * faulting instruction
260      */
261     gen_update_nip(ctx, ctx->cia);
262     t0 = tcg_const_i32(excp);
263     t1 = tcg_const_i32(error);
264     gen_helper_raise_exception_err(cpu_env, t0, t1);
265     tcg_temp_free_i32(t0);
266     tcg_temp_free_i32(t1);
267     ctx->base.is_jmp = DISAS_NORETURN;
268 }
269 
270 static void gen_exception(DisasContext *ctx, uint32_t excp)
271 {
272     TCGv_i32 t0;
273 
274     /*
275      * These are all synchronous exceptions, we set the PC back to the
276      * faulting instruction
277      */
278     gen_update_nip(ctx, ctx->cia);
279     t0 = tcg_const_i32(excp);
280     gen_helper_raise_exception(cpu_env, t0);
281     tcg_temp_free_i32(t0);
282     ctx->base.is_jmp = DISAS_NORETURN;
283 }
284 
285 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
286                               target_ulong nip)
287 {
288     TCGv_i32 t0;
289 
290     gen_update_nip(ctx, nip);
291     t0 = tcg_const_i32(excp);
292     gen_helper_raise_exception(cpu_env, t0);
293     tcg_temp_free_i32(t0);
294     ctx->base.is_jmp = DISAS_NORETURN;
295 }
296 
297 static void gen_icount_io_start(DisasContext *ctx)
298 {
299     if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
300         gen_io_start();
301         /*
302          * An I/O instruction must be last in the TB.
303          * Chain to the next TB, and let the code from gen_tb_start
304          * decide if we need to return to the main loop.
305          * Doing this first also allows this value to be overridden.
306          */
307         ctx->base.is_jmp = DISAS_TOO_MANY;
308     }
309 }
310 
311 #if !defined(CONFIG_USER_ONLY)
/* Call the interrupt-recheck helper; icount bookkeeping must come first. */
312 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
313 {
314     gen_icount_io_start(ctx);
315     gen_helper_ppc_maybe_interrupt(cpu_env);
316 }
317 #endif
318 
319 /*
320  * Tells the caller what is the appropriate exception to generate and prepares
321  * SPR registers for this exception.
322  *
323  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
324  * POWERPC_EXCP_DEBUG (on BookE).
325  */
326 static uint32_t gen_prep_dbgex(DisasContext *ctx)
327 {
328     if (ctx->flags & POWERPC_FLAG_DE) {
329         target_ulong dbsr = 0;
330         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
331             dbsr = DBCR0_ICMP;
332         } else {
333             /* Must have been branch */
334             dbsr = DBCR0_BRT;
335         }
336         TCGv t0 = tcg_temp_new();
337         gen_load_spr(t0, SPR_BOOKE_DBSR);
338         tcg_gen_ori_tl(t0, t0, dbsr);
339         gen_store_spr(SPR_BOOKE_DBSR, t0);
340         tcg_temp_free(t0);
341         return POWERPC_EXCP_DEBUG;
342     } else {
343         return POWERPC_EXCP_TRACE;
344     }
345 }
346 
/* Raise the debug/trace exception chosen by gen_prep_dbgex(); ends the TB. */
347 static void gen_debug_exception(DisasContext *ctx)
348 {
349     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
350     ctx->base.is_jmp = DISAS_NORETURN;
351 }
352 
/* Invalid-instruction exception wrappers around gen_exception_err(). */
353 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
354 {
355     /* Will be converted to program check if needed */
356     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
357 }
358 
/* Privilege violation: plain program check. */
359 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
360 {
361     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
362 }
363 
/* Hypervisor-privilege violation; routed through HV emulation assist. */
364 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
365 {
366     /* Will be converted to program check if needed */
367     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error)
368 }
369 
370 /*****************************************************************************/
371 /* SPR READ/WRITE CALLBACKS */
372 
/* SPR callback for inaccessible SPRs: intentionally does nothing. */
373 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
374 {
375 #if 0
376     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
377     printf("ERROR: try to access SPR %d !\n", sprn);
378 #endif
379 }
380 
381 /* #define PPC_DUMP_SPR_ACCESSES */
382 
383 /*
384  * Generic callbacks:
385  * do nothing but store/retrieve spr value
386  */
/* Debug hook: no-op unless PPC_DUMP_SPR_ACCESSES is defined. */
387 static void spr_load_dump_spr(int sprn)
388 {
389 #ifdef PPC_DUMP_SPR_ACCESSES
390     TCGv_i32 t0 = tcg_const_i32(sprn);
391     gen_helper_load_dump_spr(cpu_env, t0);
392     tcg_temp_free_i32(t0);
393 #endif
394 }
395 
/* Generic mfspr: copy env->spr[sprn] into the destination GPR. */
396 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
397 {
398     gen_load_spr(cpu_gpr[gprn], sprn);
399     spr_load_dump_spr(sprn);
400 }
401 
/* Debug hook: no-op unless PPC_DUMP_SPR_ACCESSES is defined. */
402 static void spr_store_dump_spr(int sprn)
403 {
404 #ifdef PPC_DUMP_SPR_ACCESSES
405     TCGv_i32 t0 = tcg_const_i32(sprn);
406     gen_helper_store_dump_spr(cpu_env, t0);
407     tcg_temp_free_i32(t0);
408 #endif
409 }
410 
/* Generic mtspr: copy the source GPR into env->spr[sprn]. */
411 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
412 {
413     gen_store_spr(sprn, cpu_gpr[gprn]);
414     spr_store_dump_spr(sprn);
415 }
416 
/* mtspr CTRL: store, then force a TB break for PMU run-latch accuracy. */
417 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
418 {
419     spr_write_generic(ctx, sprn, gprn);
420 
421     /*
422      * SPR_CTRL writes must force a new translation block,
423      * allowing the PMU to calculate the run latch events with
424      * more accuracy.
425      */
426     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
427 }
428 
429 #if !defined(CONFIG_USER_ONLY)
430 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
431 {
432 #ifdef TARGET_PPC64
433     TCGv t0 = tcg_temp_new();
434     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
435     gen_store_spr(sprn, t0);
436     tcg_temp_free(t0);
437     spr_store_dump_spr(sprn);
438 #else
439     spr_write_generic(ctx, sprn, gprn);
440 #endif
441 }
442 
443 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
444 {
445     TCGv t0 = tcg_temp_new();
446     TCGv t1 = tcg_temp_new();
447     gen_load_spr(t0, sprn);
448     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
449     tcg_gen_and_tl(t0, t0, t1);
450     gen_store_spr(sprn, t0);
451     tcg_temp_free(t0);
452     tcg_temp_free(t1);
453 }
454 
/* SPR callback that deliberately ignores the access. */
455 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
456 {
457 }
458 
459 #endif
460 
461 /* SPR common to all PowerPC */
462 /* XER */
463 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
464 {
465     TCGv dst = cpu_gpr[gprn];
466     TCGv t0 = tcg_temp_new();
467     TCGv t1 = tcg_temp_new();
468     TCGv t2 = tcg_temp_new();
469     tcg_gen_mov_tl(dst, cpu_xer);
470     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
471     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
472     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
473     tcg_gen_or_tl(t0, t0, t1);
474     tcg_gen_or_tl(dst, dst, t2);
475     tcg_gen_or_tl(dst, dst, t0);
476     if (is_isa300(ctx)) {
477         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
478         tcg_gen_or_tl(dst, dst, t0);
479         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
480         tcg_gen_or_tl(dst, dst, t0);
481     }
482     tcg_temp_free(t0);
483     tcg_temp_free(t1);
484     tcg_temp_free(t2);
485 }
486 
487 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
488 {
489     TCGv src = cpu_gpr[gprn];
490     /* Write all flags, while reading back check for isa300 */
491     tcg_gen_andi_tl(cpu_xer, src,
492                     ~((1u << XER_SO) |
493                       (1u << XER_OV) | (1u << XER_OV32) |
494                       (1u << XER_CA) | (1u << XER_CA32)));
495     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
496     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
497     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
498     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
499     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
500 }
501 
502 /* LR */
/* mfspr/mtspr LR: move between GPR and the LR global. */
503 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
504 {
505     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
506 }
507 
508 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
509 {
510     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
511 }
512 
513 /* CFAR */
514 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* mfspr/mtspr CFAR (64-bit system emulation only). */
515 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
516 {
517     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
518 }
519 
520 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
521 {
522     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
523 }
524 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
525 
526 /* CTR */
/* mfspr/mtspr CTR: move between GPR and the CTR global. */
527 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
528 {
529     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
530 }
531 
532 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
533 {
534     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
535 }
536 
537 /* User read access to SPR */
538 /* USPRx */
539 /* UMMCRx */
540 /* UPMCx */
541 /* USIA */
542 /* UDECR */
/* User-mode alias SPRs map to the privileged SPR 0x10 numbers higher. */
543 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
544 {
545     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
546 }
547 
548 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
549 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
550 {
551     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
552 }
553 #endif
554 
555 /* SPR common to all non-embedded PowerPC */
556 /* DECR */
557 #if !defined(CONFIG_USER_ONLY)
/* DECR accesses go through helpers and need icount I/O bookkeeping. */
558 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
559 {
560     gen_icount_io_start(ctx);
561     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
562 }
563 
564 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
565 {
566     gen_icount_io_start(ctx);
567     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
568 }
569 #endif
570 
571 /* SPR common to all non-embedded PowerPC, except 601 */
572 /* Time base */
/* Time-base reads: helper-backed, with icount I/O bookkeeping for TBL/TBU. */
573 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
574 {
575     gen_icount_io_start(ctx);
576     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
577 }
578 
579 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
580 {
581     gen_icount_io_start(ctx);
582     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
583 }
584 
/* Alternate time base: note no gen_icount_io_start() before these helpers. */
585 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
586 {
587     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
588 }
589 
590 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
591 {
592     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
593 }
594 
595 #if !defined(CONFIG_USER_ONLY)
/* Time-base writes (system emulation only), mirroring the reads above. */
596 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
597 {
598     gen_icount_io_start(ctx);
599     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
600 }
601 
602 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
603 {
604     gen_icount_io_start(ctx);
605     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
606 }
607 
608 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
609 {
610     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
611 }
612 
613 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
614 {
615     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
616 }
617 
618 #if defined(TARGET_PPC64)
/* 64-bit timer SPRs (PURR/HDECR/VTB/TBU40): helper-backed with icount. */
619 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
620 {
621     gen_icount_io_start(ctx);
622     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
623 }
624 
625 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
626 {
627     gen_icount_io_start(ctx);
628     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
629 }
630 
631 /* HDECR */
632 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
633 {
634     gen_icount_io_start(ctx);
635     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
636 }
637 
638 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
639 {
640     gen_icount_io_start(ctx);
641     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
642 }
643 
644 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
645 {
646     gen_icount_io_start(ctx);
647     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
648 }
649 
650 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
651 {
652     gen_icount_io_start(ctx);
653     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
654 }
655 
656 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
657 {
658     gen_icount_io_start(ctx);
659     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
660 }
661 
662 #endif
663 #endif
664 
665 #if !defined(CONFIG_USER_ONLY)
666 /* IBAT0U...IBAT0U */
667 /* IBAT0L...IBAT7L */
/* IBAT reads: sprn parity selects U/L, pair index selects the BAT slot. */
668 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
669 {
670     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
671                   offsetof(CPUPPCState,
672                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
673 }
674 
/* High IBATs (4..7) live at a different SPR base, hence the +4. */
675 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
676 {
677     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
678                   offsetof(CPUPPCState,
679                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
680 }
681 
682 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
683 {
684     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
685     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
686     tcg_temp_free_i32(t0);
687 }
688 
689 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
690 {
691     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
692     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
693     tcg_temp_free_i32(t0);
694 }
695 
696 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
697 {
698     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
699     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
700     tcg_temp_free_i32(t0);
701 }
702 
703 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
704 {
705     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
706     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
707     tcg_temp_free_i32(t0);
708 }
709 
710 /* DBAT0U...DBAT7U */
711 /* DBAT0L...DBAT7L */
/* DBAT reads, symmetric with the IBAT reads above. */
712 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
713 {
714     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
715                   offsetof(CPUPPCState,
716                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
717 }
718 
/* High DBATs (4..7) at their own SPR base, hence the +4. */
719 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
720 {
721     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
722                   offsetof(CPUPPCState,
723                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
724 }
725 
726 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
727 {
728     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
729     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
730     tcg_temp_free_i32(t0);
731 }
732 
733 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
734 {
735     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
736     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
737     tcg_temp_free_i32(t0);
738 }
739 
740 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
741 {
742     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
743     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
744     tcg_temp_free_i32(t0);
745 }
746 
747 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
748 {
749     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
750     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
751     tcg_temp_free_i32(t0);
752 }
753 
754 /* SDR1 */
/* mtspr SDR1: helper-backed (hash-table base update has MMU side effects). */
755 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
756 {
757     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
758 }
759 
760 #if defined(TARGET_PPC64)
761 /* 64 bits PowerPC specific SPRs */
762 /* PIDR */
/* PIDR/LPIDR writes go through helpers (translation context changes). */
763 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
764 {
765     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
766 }
767 
768 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
769 {
770     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
771 }
772 
/* HIOR is modelled as env->excp_prefix. */
773 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
774 {
775     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
776 }
777 
/* Only bits 0x3FFFFF00000 of HIOR are kept on write. */
778 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
779 {
780     TCGv t0 = tcg_temp_new();
781     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
782     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
783     tcg_temp_free(t0);
784 }
/* Partition-table and processor-compat control: helper-backed writes. */
785 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
786 {
787     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
788 }
789 
790 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
791 {
792     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
793 }
794 
795 /* DPDES */
796 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
797 {
798     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
799 }
800 
801 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
802 {
803     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
804 }
805 #endif
806 #endif
807 
808 /* PowerPC 40x specific registers */
809 #if !defined(CONFIG_USER_ONLY)
/* 40x PIT: timer access, so icount I/O bookkeeping precedes the helper. */
810 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
811 {
812     gen_icount_io_start(ctx);
813     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
814 }
815 
816 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
817 {
818     gen_icount_io_start(ctx);
819     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
820 }
821 
/* DBCR0: stored raw AND passed to the helper; TB must end afterwards. */
822 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
823 {
824     gen_icount_io_start(ctx);
825     gen_store_spr(sprn, cpu_gpr[gprn]);
826     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
827     /* We must stop translation as we may have rebooted */
828     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
829 }
830 
831 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
832 {
833     gen_icount_io_start(ctx);
834     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
835 }
836 
837 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
838 {
839     gen_icount_io_start(ctx);
840     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
841 }
842 
843 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
844 {
845     gen_icount_io_start(ctx);
846     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
847 }
848 
/* 40x PID is only 8 bits wide; mask before handing to the helper. */
849 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
850 {
851     TCGv t0 = tcg_temp_new();
852     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
853     gen_helper_store_40x_pid(cpu_env, t0);
854     tcg_temp_free(t0);
855 }
856 
/* BookE timer control/status: helper-backed with icount bookkeeping. */
857 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
858 {
859     gen_icount_io_start(ctx);
860     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
861 }
862 
863 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
864 {
865     gen_icount_io_start(ctx);
866     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
867 }
868 #endif
869 
870 /* PIR */
871 #if !defined(CONFIG_USER_ONLY)
872 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
873 {
874     TCGv t0 = tcg_temp_new();
875     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
876     gen_store_spr(SPR_PIR, t0);
877     tcg_temp_free(t0);
878 }
879 #endif
880 
881 /* SPE specific registers */
/* SPEFSCR is stored as a 32-bit field; zero-extend on read. */
882 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
883 {
884     TCGv_i32 t0 = tcg_temp_new_i32();
885     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
886     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
887     tcg_temp_free_i32(t0);
888 }
889 
/* Truncate the GPR to 32 bits on write. */
890 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
891 {
892     TCGv_i32 t0 = tcg_temp_new_i32();
893     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
894     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
895     tcg_temp_free_i32(t0);
896 }
897 
898 #if !defined(CONFIG_USER_ONLY)
899 /* Callback used to write the exception vector base */
900 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
901 {
902     TCGv t0 = tcg_temp_new();
903     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
904     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
905     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
906     gen_store_spr(sprn, t0);
907     tcg_temp_free(t0);
908 }
909 
910 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
911 {
912     int sprn_offs;
913 
914     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
915         sprn_offs = sprn - SPR_BOOKE_IVOR0;
916     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
917         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
918     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
919         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
920     } else {
921         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
922                       " vector 0x%03x\n", sprn);
923         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
924         return;
925     }
926 
927     TCGv t0 = tcg_temp_new();
928     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
929     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
930     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
931     gen_store_spr(sprn, t0);
932     tcg_temp_free(t0);
933 }
934 #endif
935 
936 #ifdef TARGET_PPC64
937 #ifndef CONFIG_USER_ONLY
938 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
939 {
940     TCGv t0 = tcg_temp_new();
941     TCGv t1 = tcg_temp_new();
942     TCGv t2 = tcg_temp_new();
943 
944     /*
945      * Note, the HV=1 PR=0 case is handled earlier by simply using
946      * spr_write_generic for HV mode in the SPR table
947      */
948 
949     /* Build insertion mask into t1 based on context */
950     if (ctx->pr) {
951         gen_load_spr(t1, SPR_UAMOR);
952     } else {
953         gen_load_spr(t1, SPR_AMOR);
954     }
955 
956     /* Mask new bits into t2 */
957     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
958 
959     /* Load AMR and clear new bits in t0 */
960     gen_load_spr(t0, SPR_AMR);
961     tcg_gen_andc_tl(t0, t0, t1);
962 
963     /* Or'in new bits and write it out */
964     tcg_gen_or_tl(t0, t0, t2);
965     gen_store_spr(SPR_AMR, t0);
966     spr_store_dump_spr(SPR_AMR);
967 
968     tcg_temp_free(t0);
969     tcg_temp_free(t1);
970     tcg_temp_free(t2);
971 }
972 
973 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
974 {
975     TCGv t0 = tcg_temp_new();
976     TCGv t1 = tcg_temp_new();
977     TCGv t2 = tcg_temp_new();
978 
979     /*
980      * Note, the HV=1 case is handled earlier by simply using
981      * spr_write_generic for HV mode in the SPR table
982      */
983 
984     /* Build insertion mask into t1 based on context */
985     gen_load_spr(t1, SPR_AMOR);
986 
987     /* Mask new bits into t2 */
988     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
989 
990     /* Load AMR and clear new bits in t0 */
991     gen_load_spr(t0, SPR_UAMOR);
992     tcg_gen_andc_tl(t0, t0, t1);
993 
994     /* Or'in new bits and write it out */
995     tcg_gen_or_tl(t0, t0, t2);
996     gen_store_spr(SPR_UAMOR, t0);
997     spr_store_dump_spr(SPR_UAMOR);
998 
999     tcg_temp_free(t0);
1000     tcg_temp_free(t1);
1001     tcg_temp_free(t2);
1002 }
1003 
1004 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1005 {
1006     TCGv t0 = tcg_temp_new();
1007     TCGv t1 = tcg_temp_new();
1008     TCGv t2 = tcg_temp_new();
1009 
1010     /*
1011      * Note, the HV=1 case is handled earlier by simply using
1012      * spr_write_generic for HV mode in the SPR table
1013      */
1014 
1015     /* Build insertion mask into t1 based on context */
1016     gen_load_spr(t1, SPR_AMOR);
1017 
1018     /* Mask new bits into t2 */
1019     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1020 
1021     /* Load AMR and clear new bits in t0 */
1022     gen_load_spr(t0, SPR_IAMR);
1023     tcg_gen_andc_tl(t0, t0, t1);
1024 
1025     /* Or'in new bits and write it out */
1026     tcg_gen_or_tl(t0, t0, t2);
1027     gen_store_spr(SPR_IAMR, t0);
1028     spr_store_dump_spr(SPR_IAMR);
1029 
1030     tcg_temp_free(t0);
1031     tcg_temp_free(t1);
1032     tcg_temp_free(t2);
1033 }
1034 #endif
1035 #endif
1036 
1037 #ifndef CONFIG_USER_ONLY
/* Read a THRMx SPR: run the fixup_thrm helper first, then load the value */
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
1044 #endif /* !CONFIG_USER_ONLY */
1045 
1046 #if !defined(CONFIG_USER_ONLY)
1047 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1048 {
1049     TCGv t0 = tcg_temp_new();
1050 
1051     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1052     gen_store_spr(sprn, t0);
1053     tcg_temp_free(t0);
1054 }
1055 
1056 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1057 {
1058     TCGv t0 = tcg_temp_new();
1059 
1060     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1061     gen_store_spr(sprn, t0);
1062     tcg_temp_free(t0);
1063 }
1064 
1065 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1066 {
1067     TCGv t0 = tcg_temp_new();
1068 
1069     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1070                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1071     gen_store_spr(sprn, t0);
1072     tcg_temp_free(t0);
1073 }
1074 
/* MMUCSR0 writes are handled entirely by the booke206_tlbflush helper */
void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}
1079 
/* PID writes delegate to the booke_setpid helper with the SPR number */
void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
/* EPLC writes go through a helper (external PID load context) */
void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}
/* EPSC writes go through a helper (external PID store context) */
void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}
1094 
1095 #endif
1096 
1097 #if !defined(CONFIG_USER_ONLY)
1098 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1099 {
1100     TCGv val = tcg_temp_new();
1101     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1102     gen_store_spr(SPR_BOOKE_MAS3, val);
1103     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1104     gen_store_spr(SPR_BOOKE_MAS7, val);
1105     tcg_temp_free(val);
1106 }
1107 
1108 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1109 {
1110     TCGv mas7 = tcg_temp_new();
1111     TCGv mas3 = tcg_temp_new();
1112     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1113     tcg_gen_shli_tl(mas7, mas7, 32);
1114     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1115     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1116     tcg_temp_free(mas3);
1117     tcg_temp_free(mas7);
1118 }
1119 
1120 #endif
1121 
1122 #ifdef TARGET_PPC64
/*
 * Emit a runtime FSCR facility check via helper, passing the facility
 * bit, the accessed SPR number and the interrupt cause as immediates.
 * NOTE(review): facility_sprn is not used in this function body.
 */
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}
1136 
/*
 * Same as gen_fscr_facility_check but for MSR-gated facilities (e.g. TM).
 * NOTE(review): facility_sprn is not used in this function body.
 */
static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}
1150 
1151 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1152 {
1153     TCGv spr_up = tcg_temp_new();
1154     TCGv spr = tcg_temp_new();
1155 
1156     gen_load_spr(spr, sprn - 1);
1157     tcg_gen_shri_tl(spr_up, spr, 32);
1158     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1159 
1160     tcg_temp_free(spr);
1161     tcg_temp_free(spr_up);
1162 }
1163 
1164 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1165 {
1166     TCGv spr = tcg_temp_new();
1167 
1168     gen_load_spr(spr, sprn - 1);
1169     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1170     gen_store_spr(sprn - 1, spr);
1171 
1172     tcg_temp_free(spr);
1173 }
1174 
1175 #if !defined(CONFIG_USER_ONLY)
/*
 * HMER writes AND the new value into the current contents, so software
 * can only clear bits, never set them.
 */
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}
1186 
/* LPCR writes go through a helper so side effects can be applied */
void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
1191 #endif /* !defined(CONFIG_USER_ONLY) */
1192 
/* TAR accesses are gated by FSCR[TAR], then behave as generic SPR access */
void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

/* TM SPR accesses are gated by MSR[TM] */
void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

/* Upper-half variants access the high 32 bits of the SPR below sprn */
void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

/* EBB SPR accesses are gated by FSCR[EBB] */
void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
1252 #endif
1253 
/*
 * Shorthand wrappers for the opcode-table entry builders:
 *  - the _E suffix adds a second type flag word (type2),
 *  - the "2" variants take an explicit opcode-name string (onam),
 *  - the _2 suffix adds a fourth opcode field (opc4).
 */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1271 
/* One opcode-table entry: opcode fields, handler descriptor and name */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;  /* opcode name */
} opcode_t;
1280 
/* Raise a privileged-opcode program interrupt */
static void gen_priv_opc(DisasContext *ctx)
{
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
}
1285 
/* Helpers for priv. check */
#define GEN_PRIV(CTX)              \
    do {                           \
        gen_priv_opc(CTX); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
/* In user mode every privileged operation is illegal */
#define CHK_HV(CTX) GEN_PRIV(CTX)
#define CHK_SV(CTX) GEN_PRIV(CTX)
#define CHK_HVRM(CTX) GEN_PRIV(CTX)
#else
/*
 * Use the CTX parameter consistently: these previously expanded "ctx"
 * directly, which only worked when the argument happened to be named ctx.
 */
#define CHK_HV(CTX)                              \
    do {                                         \
        if (unlikely((CTX)->pr || !(CTX)->hv)) { \
            GEN_PRIV(CTX);                       \
        }                                        \
    } while (0)
#define CHK_SV(CTX)                \
    do {                           \
        if (unlikely((CTX)->pr)) { \
            GEN_PRIV(CTX);         \
        }                          \
    } while (0)
#define CHK_HVRM(CTX)                                         \
    do {                                                      \
        if (unlikely((CTX)->pr || !(CTX)->hv || (CTX)->dr)) { \
            GEN_PRIV(CTX);                                    \
        }                                                     \
    } while (0)
#endif

#define CHK_NONE(CTX)
1318 
1319 /*****************************************************************************/
1320 /* PowerPC instructions table                                                */
1321 
/*
 * Opcode table entry builders.  opc4 is 0xff when the entry has no
 * fourth opcode field; oname defaults to the stringified handler name
 * unless an explicit onam is supplied.  The _DUAL variant carries two
 * invalid-bit masks.
 */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
1393 
/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
1399 
/* Catch-all handler installed for undecoded opcodes */
static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};
1407 
1408 /***                           Integer comparison                          ***/
1409 
/*
 * Compare arg0 with arg1 (signed when s != 0) and set CR field crf:
 * one of LT/GT/EQ from the comparison, OR'ed with the SO bit.
 */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    /* Start from EQ, then conditionally override with LT, then GT */
    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    /* Merge the summary-overflow bit into the CR field */
    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}
1432 
1433 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1434 {
1435     TCGv t0 = tcg_const_tl(arg1);
1436     gen_op_cmp(arg0, t0, s, crf);
1437     tcg_temp_free(t0);
1438 }
1439 
1440 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1441 {
1442     TCGv t0, t1;
1443     t0 = tcg_temp_new();
1444     t1 = tcg_temp_new();
1445     if (s) {
1446         tcg_gen_ext32s_tl(t0, arg0);
1447         tcg_gen_ext32s_tl(t1, arg1);
1448     } else {
1449         tcg_gen_ext32u_tl(t0, arg0);
1450         tcg_gen_ext32u_tl(t1, arg1);
1451     }
1452     gen_op_cmp(t0, t1, s, crf);
1453     tcg_temp_free(t1);
1454     tcg_temp_free(t0);
1455 }
1456 
1457 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1458 {
1459     TCGv t0 = tcg_const_tl(arg1);
1460     gen_op_cmp32(arg0, t0, s, crf);
1461     tcg_temp_free(t0);
1462 }
1463 
/* Set CR0 from a signed compare of reg against zero (Rc=1 semantics) */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
1472 
/* cmprb - range comparison: isupper, isalpha, islower */
/*
 * Compare Ranged Byte: test whether the low byte of rA lies within the
 * byte range(s) packed into rB, and set CR[crfD].GT with the result.
 * When opcode bit 0x00200000 (L) is set a second range is also checked.
 */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    /* src1 = byte under test; src2lo/src2hi = first range bounds */
    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    /* in-range <=> lo <= src1 && src1 <= hi */
    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        /* L=1: also test the second range in the upper bytes of rB */
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    /* Place the 0/1 result in the GT position of the CR field */
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}
1510 
1511 #if defined(TARGET_PPC64)
/* cmpeqb - compare equal byte: implemented entirely in a helper */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
1518 #endif
1519 
/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    /* bi selects a CR bit; mask isolates it within its 4-bit CR field */
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    /* rD = CR[bi] ? rA : rB, where rA == 0 denotes the constant zero */
    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}
1538 
/* cmpb: PowerPC 2.05 specification -- implemented in a helper */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
1545 
1546 /***                           Integer arithmetic                          ***/
1547 
/*
 * Compute XER.OV (and OV32 on ISA v3.0) from the sign bits of the
 * result (arg0) and operands (arg1, arg2) of an add (sub=0) or
 * subtract (sub=1), and accumulate it into XER.SO.
 */
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        /* 32-bit mode: overflow lives in bit 31; OV32 mirrors OV */
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}
1574 
/*
 * Set ca32 to the carry out of bit 32 of an add (sub=0) or subtract
 * (sub=1).  No-op before ISA v3.0, which introduced CA32.
 */
static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    /* carry into bit 32 == bit 32 of (arg0 ^ arg1 ^ res), inverted for sub */
    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
1595 
/* Common add function */
/*
 * Emit ret = arg1 + arg2 [+ ca], with optional carry-out (compute_ca,
 * written to ca and, on ISA v3.0, ca32), overflow (compute_ov) and CR0
 * update (compute_rc0).
 */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    /* Use a scratch result so the flag code can still read arg1/arg2
       even when ret aliases one of them */
    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            /* Full-width carry via double-word additions */
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    /* Copy the scratch result into the destination if one was used */
    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}
1678 
/* add  add.  addo  addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc  addc.  addco  addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde  adde.  addeo  addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme  addme.  addmeo  addmeo.  */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
/* NOTE: addex deliberately passes cpu_ov (not cpu_ca) as the carry
 * register -- presumably ISA v3.0 addex with CY=0, which uses OV as
 * carry in/out; TODO confirm against the ISA */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze  addze.  addzeo  addzeo.*/
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic  addic.*/
/* Add immediate with carry-out; compute_rc0 selects the record form */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}
1704 
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

/* addic. -- record form, also sets CR0 */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}
1714 
/*
 * Emit a 32-bit divide (signed when sign != 0).  Division by zero --
 * and INT_MIN / -1 in the signed case -- would trap on the host, so
 * those cases are detected into t2 and the divisor is forced to 1;
 * t2 then feeds OV/OV32 when compute_ov is set.
 */
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        /* t2 = 1 iff (INT_MIN / -1) or divide by zero */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* replace the divisor with 1 (== t2) on the overflow path */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        /* t2 = 1 iff divide by zero */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     sign, compute_ov);                                       \
}
/* divwu  divwu.  divwuo  divwuo.   */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw  divw.  divwo  divwo.   */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);

/* div[wd]eu[o][.] */
/* Extended divide: the computation happens in a helper; compute_ov is
 * passed in so the helper can update the overflow flags itself */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);
1790 
1791 #if defined(TARGET_PPC64)
/*
 * 64-bit counterpart of gen_op_arith_divw: divide by zero and
 * INT64_MIN / -1 are forced to divide by 1 (t2) so the host division
 * cannot trap; t2 then feeds OV/OV32 when compute_ov is set.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = 1 iff (INT64_MIN / -1) or divide by zero */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        /* t2 = 1 iff divide by zero */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
1833 
/* Expand a 64-bit divide front end; arguments mirror GEN_INT_ARITH_DIVW */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu  divdu.  divduo  divduo.   */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd  divd.  divdo  divdo.   */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

/* divdeu  divdeu.  divdeuo  divdeuo. */
GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
/* divde  divde.  divdeo  divdeo. */
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
1852 #endif
1853 
/*
 * Generate code for modsw/moduw:
 *   ret = arg1 % arg2 in 32-bit arithmetic, the result sign- (modsw)
 *   or zero-extended (moduw) to the target register width.
 *
 * As with the divide helpers, the divisor is patched before the TCG
 * rem op so the host never executes a trapping division; in the
 * architected undefined cases (divide by zero, or INT_MIN % -1 for the
 * signed form) a divisor of 1 is substituted and the result is
 * boundedly undefined per the ISA.
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        /* t2 = (t0 == INT_MIN && t1 == -1) || t1 == 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* Replace the divisor with 1 (== t2) in the undefined cases */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* Substitute divisor 1 for 0 to avoid a trapping host division */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
1888 
/* Expand a 32-bit modulo front end: (name, opc3, sign) */
#define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
static void glue(gen_, name)(DisasContext *ctx)                             \
{                                                                           \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
                      sign);                                                \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1899 
1900 #if defined(TARGET_PPC64)
/*
 * Generate code for modsd/modud: ret = arg1 % arg2 in 64-bit
 * arithmetic.  Same divisor-patching scheme as gen_op_arith_modw:
 * the architected undefined cases (divide by zero, INT64_MIN % -1 for
 * the signed form) get a substituted divisor of 1 so the host division
 * cannot trap; the result in those cases is boundedly undefined.
 */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        /* t2 = (t0 == INT64_MIN && t1 == -1) || t1 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* Replace the divisor with 1 (== t2) in the undefined cases */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        /* Substitute divisor 1 for 0 to avoid a trapping host division */
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
1933 
1934 #define GEN_INT_ARITH_MODD(name, opc3, sign)                            \
1935 static void glue(gen_, name)(DisasContext *ctx)                           \
1936 {                                                                         \
1937   gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1938                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1939                     sign);                                                \
1940 }
1941 
1942 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1943 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1944 #endif
1945 
1946 /* mulhw  mulhw. */
1947 static void gen_mulhw(DisasContext *ctx)
1948 {
1949     TCGv_i32 t0 = tcg_temp_new_i32();
1950     TCGv_i32 t1 = tcg_temp_new_i32();
1951 
1952     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1953     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1954     tcg_gen_muls2_i32(t0, t1, t0, t1);
1955     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1956     tcg_temp_free_i32(t0);
1957     tcg_temp_free_i32(t1);
1958     if (unlikely(Rc(ctx->opcode) != 0)) {
1959         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1960     }
1961 }
1962 
1963 /* mulhwu  mulhwu.  */
1964 static void gen_mulhwu(DisasContext *ctx)
1965 {
1966     TCGv_i32 t0 = tcg_temp_new_i32();
1967     TCGv_i32 t1 = tcg_temp_new_i32();
1968 
1969     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1970     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1971     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1972     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1973     tcg_temp_free_i32(t0);
1974     tcg_temp_free_i32(t1);
1975     if (unlikely(Rc(ctx->opcode) != 0)) {
1976         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1977     }
1978 }
1979 
/* mullw  mullw. : rD = low-order 32 bits of rA * rB */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    /*
     * On ppc64, write the full 64-bit product of the sign-extended
     * 32-bit operands into rD (the low 32 bits are the architected
     * result).  TCGv == TCGv_i64 here, so the _tl ops and plain
     * tcg_temp_free on i64 temps are correct for this configuration.
     */
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2000 
/* mullwo  mullwo. : 32x32 signed multiply, also setting XER[OV] */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    /* t0 = low half of product, t1 = high half */
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    /* On ppc64, rD receives the full 64-bit product (t1:t0) */
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* Overflow iff the high half is not the sign extension of the low */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2030 
2031 /* mulli */
2032 static void gen_mulli(DisasContext *ctx)
2033 {
2034     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2035                     SIMM(ctx->opcode));
2036 }
2037 
2038 #if defined(TARGET_PPC64)
2039 /* mulhd  mulhd. */
2040 static void gen_mulhd(DisasContext *ctx)
2041 {
2042     TCGv lo = tcg_temp_new();
2043     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2044                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2045     tcg_temp_free(lo);
2046     if (unlikely(Rc(ctx->opcode) != 0)) {
2047         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2048     }
2049 }
2050 
2051 /* mulhdu  mulhdu. */
2052 static void gen_mulhdu(DisasContext *ctx)
2053 {
2054     TCGv lo = tcg_temp_new();
2055     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2056                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2057     tcg_temp_free(lo);
2058     if (unlikely(Rc(ctx->opcode) != 0)) {
2059         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2060     }
2061 }
2062 
2063 /* mulld  mulld. */
2064 static void gen_mulld(DisasContext *ctx)
2065 {
2066     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2067                    cpu_gpr[rB(ctx->opcode)]);
2068     if (unlikely(Rc(ctx->opcode) != 0)) {
2069         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2070     }
2071 }
2072 
/* mulldo  mulldo. : 64x64 signed multiply, also setting XER[OV] */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* t0 = low half of the 128-bit product, t1 = high half */
    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* Overflow iff the high half is not the sign extension of the low */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2097 #endif
2098 
/* Common subf function */
/*
 * Generate code for the subtract-from family (subf*, subfic, neg):
 *   ret = ~arg1 + arg2 + (add_ca ? CA : 1), i.e. arg2 - arg1 [+ CA - 1]
 *
 * compute_ca  : update XER[CA] (and CA32 on ISA v3.00)
 * compute_ov  : update XER[OV]/OV32 and accumulate into SO
 * compute_rc0 : update CR0 from the result (Rc=1 forms)
 *
 * A scratch destination is used whenever flags are computed, so that
 * ret may alias arg1/arg2 without clobbering the flag inputs.
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            /* Two-step add with carry-out: (arg2 + CA) + ~arg1 */
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* CA = no borrow, i.e. arg2 >= arg1 (unsigned) */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /*
         * Since we're ignoring carry-out, we can simplify the
         * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Expand a subtract-from with two register operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Expand a subtract-from with one register operand and one constant */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf  subf.  subfo  subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc  subfc.  subfco  subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe  subfe.  subfeo  subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme  subfme.  subfmeo  subfmeo.  */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze  subfze.  subfzeo  subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2207 
2208 /* subfic */
2209 static void gen_subfic(DisasContext *ctx)
2210 {
2211     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2212     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2213                       c, 0, 1, 0, 0);
2214     tcg_temp_free(c);
2215 }
2216 
2217 /* neg neg. nego nego. */
2218 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2219 {
2220     TCGv zero = tcg_const_tl(0);
2221     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2222                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2223     tcg_temp_free(zero);
2224 }
2225 
2226 static void gen_neg(DisasContext *ctx)
2227 {
2228     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2229     if (unlikely(Rc(ctx->opcode))) {
2230         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2231     }
2232 }
2233 
/* nego / nego. : negate with XER[OV] update, via the subf helper */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}
2238 
2239 /***                            Integer logical                            ***/
2240 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2241 static void glue(gen_, name)(DisasContext *ctx)                               \
2242 {                                                                             \
2243     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2244        cpu_gpr[rB(ctx->opcode)]);                                             \
2245     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2246         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2247 }
2248 
2249 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2250 static void glue(gen_, name)(DisasContext *ctx)                               \
2251 {                                                                             \
2252     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2253     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2254         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2255 }
2256 
2257 /* and & and. */
2258 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2259 /* andc & andc. */
2260 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2261 
2262 /* andi. */
2263 static void gen_andi_(DisasContext *ctx)
2264 {
2265     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2266                     UIMM(ctx->opcode));
2267     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2268 }
2269 
2270 /* andis. */
2271 static void gen_andis_(DisasContext *ctx)
2272 {
2273     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2274                     UIMM(ctx->opcode) << 16);
2275     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2276 }
2277 
2278 /* cntlzw */
2279 static void gen_cntlzw(DisasContext *ctx)
2280 {
2281     TCGv_i32 t = tcg_temp_new_i32();
2282 
2283     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2284     tcg_gen_clzi_i32(t, t, 32);
2285     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2286     tcg_temp_free_i32(t);
2287 
2288     if (unlikely(Rc(ctx->opcode) != 0)) {
2289         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2290     }
2291 }
2292 
2293 /* cnttzw */
2294 static void gen_cnttzw(DisasContext *ctx)
2295 {
2296     TCGv_i32 t = tcg_temp_new_i32();
2297 
2298     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2299     tcg_gen_ctzi_i32(t, t, 32);
2300     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2301     tcg_temp_free_i32(t);
2302 
2303     if (unlikely(Rc(ctx->opcode) != 0)) {
2304         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2305     }
2306 }
2307 
/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. : sign-extend byte */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. : sign-extend halfword */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2317 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2318 
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/*
 * Emit code that exits the TB with EXCP_HLT so other vCPUs get a
 * chance to run.  The store writes 0 to CPUState::halted, reached from
 * env via a negative offset (NOTE(review): this relies on env being
 * embedded in PowerPCCPU with CPUState as its leading parent -- the
 * offsetof arithmetic only works under that layout).
 */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2331 
/* or & or. */
/*
 * Also implements the "or rx,rx,rx" priority-hint nop encodings: when
 * rS == rA == rB the instruction moves no data, and on ppc64 selected
 * register numbers adjust the thread priority field of SPR_PPR
 * (bits 13:11 of the architected register, i.e. shift 50 here).
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case: plain or/mr when registers differ */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        /* or. rx,rx,rx : just set CR0 from the register */
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* Replace the PPR priority field with the new value */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs.  This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2420 
2421 /* xor & xor. */
2422 static void gen_xor(DisasContext *ctx)
2423 {
2424     /* Optimisation for "set to zero" case */
2425     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2426         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2427                        cpu_gpr[rB(ctx->opcode)]);
2428     } else {
2429         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2430     }
2431     if (unlikely(Rc(ctx->opcode) != 0)) {
2432         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2433     }
2434 }
2435 
2436 /* ori */
2437 static void gen_ori(DisasContext *ctx)
2438 {
2439     target_ulong uimm = UIMM(ctx->opcode);
2440 
2441     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2442         return;
2443     }
2444     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2445 }
2446 
2447 /* oris */
2448 static void gen_oris(DisasContext *ctx)
2449 {
2450     target_ulong uimm = UIMM(ctx->opcode);
2451 
2452     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2453         /* NOP */
2454         return;
2455     }
2456     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2457                    uimm << 16);
2458 }
2459 
2460 /* xori */
2461 static void gen_xori(DisasContext *ctx)
2462 {
2463     target_ulong uimm = UIMM(ctx->opcode);
2464 
2465     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2466         /* NOP */
2467         return;
2468     }
2469     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2470 }
2471 
2472 /* xoris */
2473 static void gen_xoris(DisasContext *ctx)
2474 {
2475     target_ulong uimm = UIMM(ctx->opcode);
2476 
2477     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2478         /* NOP */
2479         return;
2480     }
2481     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2482                     uimm << 16);
2483 }
2484 
2485 /* popcntb : PowerPC 2.03 specification */
2486 static void gen_popcntb(DisasContext *ctx)
2487 {
2488     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2489 }
2490 
/* popcntw : population count per 32-bit word */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    /* Helper handles the two 32-bit words of the 64-bit register */
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    /* 32-bit target: the register is a single word, plain popcount */
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}
2499 
#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ctpop_i64(ra, rs);
}
#endif
2507 
/* prtyw: PowerPC 2.05 specification */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    /*
     * XOR-fold each 32-bit word onto its low byte; bit 0 of each word
     * then holds the parity of the word's byte-lsbs, and the mask
     * keeps exactly those bits (bit 0 and, on 64-bit, bit 32).
     */
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}
2521 
#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    /*
     * XOR-fold the doubleword onto its low byte; bit 0 then holds the
     * parity of the byte-lsbs of the whole doubleword.
     */
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif
2539 
#if defined(TARGET_PPC64)
/* bpermd : bit permute doubleword, done entirely in a helper */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif
2548 
2549 #if defined(TARGET_PPC64)
2550 /* extsw & extsw. */
2551 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2552 
2553 /* cntlzd */
2554 static void gen_cntlzd(DisasContext *ctx)
2555 {
2556     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2557     if (unlikely(Rc(ctx->opcode) != 0)) {
2558         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2559     }
2560 }
2561 
2562 /* cnttzd */
2563 static void gen_cnttzd(DisasContext *ctx)
2564 {
2565     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2566     if (unlikely(Rc(ctx->opcode) != 0)) {
2567         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2568     }
2569 }
2570 
/* darn : deliver a random number */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l > 2) {
        /* Reserved L values produce the error indication (all ones) */
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    } else {
        /* Treated as an I/O operation for icount bookkeeping */
        gen_icount_io_start(ctx);
        if (l == 0) {
            gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
        } else {
            /* Return 64-bit random for both CRN and RRN */
            gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
        }
    }
}
2588 #endif
2589 
2590 /***                             Integer rotate                            ***/
2591 
/* rlwimi & rlwimi. */
/*
 * Rotate the low word of rS left by sh and insert it into rA under the
 * mask mb..me.  When the rotated field lands contiguously (sh lines up
 * with the mask's end and mb <= me), a single deposit suffices; the
 * general path builds the mask explicitly, rotating via i32 when the
 * mask fits in 32 bits, else via the 64-bit "replicate the word into
 * both halves, then rotate" trick.
 */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        /* Contiguous insert: deposit me-mb+1 bits of rS at position sh */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* Word masks occupy the low half of the 64-bit register */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            mask_in_32b = false;
        }
#endif
        t1 = tcg_temp_new();
        if (mask_in_32b) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Duplicate the word so a 64-bit rotate acts as a 32-bit one */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        /* Merge the rotated value into rA under the mask */
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2644 
/* rlwinm & rlwinm. */
/*
 * Rotate the low word of rS left by the immediate sh and AND with the
 * mask mb..me.  Shift-and-mask special cases map directly onto TCG
 * deposit/extract; otherwise the mask is applied after a rotate, done
 * in 32 bits when the mask allows, else via the replicated-word trick.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* Pure left shift into a zeroed register */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* Pure right shift: extract len bits starting at rsh */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
#if defined(TARGET_PPC64)
        /* Word masks occupy the low half of the 64-bit register */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            mask_in_32b = false;
        }
#endif
        if (mask_in_32b) {
            if (sh == 0) {
                tcg_gen_andi_tl(t_ra, t_rs, mask);
            } else {
                TCGv_i32 t0 = tcg_temp_new_i32();
                tcg_gen_trunc_tl_i32(t0, t_rs);
                tcg_gen_rotli_i32(t0, t0, sh);
                tcg_gen_andi_i32(t0, t0, mask);
                tcg_gen_extu_i32_tl(t_ra, t0);
                tcg_temp_free_i32(t0);
            }
        } else {
#if defined(TARGET_PPC64)
            /* Duplicate the word so a 64-bit rotate acts as a 32-bit one */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2698 
/* rlwnm & rlwnm.: rA = ROTL32(rS, rB & 31) & MASK(mb, me). */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;
    bool mask_in_32b = true;

#if defined(TARGET_PPC64)
    /* Shift the 32-bit mask range into the low word of the 64-bit reg. */
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

#if defined(TARGET_PPC64)
    /* A wrap-around mask (mb > me) spills into the high word. */
    if (mask > 0xffffffffu) {
        mask_in_32b = false;
    }
#endif
    if (mask_in_32b) {
        /* Rotate entirely in 32 bits, then zero-extend. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* Replicate the low word so a 64-bit rotate matches the
         * 32-bit rotate semantics across the wrap-around mask. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2749 
2750 #if defined(TARGET_PPC64)
/*
 * Expand a parameterized gen_<name>() into the separate opcode handlers
 * needed for the split immediate fields of the 64-bit rotate insns:
 * _R2 emits two variants (one extra field bit).
 */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
/* As above: _R4 emits four variants (two extra field bits). */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
2781 
2782 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2783 {
2784     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2785     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2786     int len = me - mb + 1;
2787     int rsh = (64 - sh) & 63;
2788 
2789     if (sh != 0 && len > 0 && me == (63 - sh)) {
2790         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2791     } else if (me == 63 && rsh + len <= 64) {
2792         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2793     } else {
2794         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2795         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2796     }
2797     if (unlikely(Rc(ctx->opcode) != 0)) {
2798         gen_set_Rc0(ctx, t_ra);
2799     }
2800 }
2801 
2802 /* rldicl - rldicl. */
2803 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2804 {
2805     uint32_t sh, mb;
2806 
2807     sh = SH(ctx->opcode) | (shn << 5);
2808     mb = MB(ctx->opcode) | (mbn << 5);
2809     gen_rldinm(ctx, mb, 63, sh);
2810 }
2811 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2812 
/* rldicr - rldicr.: rotate left dword immediate then clear right. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* The 6-bit me field occupies the same opcode bits as mb, so the
     * MB() extractor is intentionally reused here. */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2823 
2824 /* rldic - rldic. */
2825 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2826 {
2827     uint32_t sh, mb;
2828 
2829     sh = SH(ctx->opcode) | (shn << 5);
2830     mb = MB(ctx->opcode) | (mbn << 5);
2831     gen_rldinm(ctx, mb, 63 - sh, sh);
2832 }
2833 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2834 
2835 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2836 {
2837     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2838     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2839     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2840     TCGv t0;
2841 
2842     t0 = tcg_temp_new();
2843     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2844     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2845     tcg_temp_free(t0);
2846 
2847     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2848     if (unlikely(Rc(ctx->opcode) != 0)) {
2849         gen_set_Rc0(ctx, t_ra);
2850     }
2851 }
2852 
2853 /* rldcl - rldcl. */
2854 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2855 {
2856     uint32_t mb;
2857 
2858     mb = MB(ctx->opcode) | (mbn << 5);
2859     gen_rldnm(ctx, mb, 63);
2860 }
2861 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2862 
/* rldcr - rldcr.: rotate left dword then clear right. */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    /* The 6-bit me field shares its opcode position with mb, hence MB(). */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2872 
/* rldimi - rldimi.: rotate left dword immediate then mask insert. */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;   /* mask end is fixed by the rotate amount */

    if (mb <= me) {
        /* Contiguous mask: insert rS into rA as a plain bit-field. */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        /* Wrap-around mask: rotate, then merge under the mask. */
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2899 #endif
2900 
2901 /***                             Integer shift                             ***/
2902 
/* slw & slw.: shift left word; result is zero when rB[26] is set. */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /*
     * AND rS with a mask that is 0 when rB >= 0x20: move bit 5 of the
     * shift count into the sign position and smear it, producing
     * all-ones exactly when the count is 32 or more.
     */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    /* In-range shifts use only the low five bits of the count. */
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    /* Keep only the low 32 bits of the result. */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2928 
2929 /* sraw & sraw. */
2930 static void gen_sraw(DisasContext *ctx)
2931 {
2932     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2933                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2934     if (unlikely(Rc(ctx->opcode) != 0)) {
2935         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2936     }
2937 }
2938 
/* srawi & srawi.: shift right algebraic word immediate. */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* Zero shift: just sign-extend the low word; CA is clear. */
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        /* Collect the bits that will be shifted out. */
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* Smear the sign bit: CA can only be set for negative sources. */
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        /* Collapse any surviving shifted-out bits to a 0/1 flag. */
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
2969 
/* srw & srw.: shift right word; result is zero when rB[26] is set. */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /*
     * AND rS with a mask that is 0 when rB >= 0x20: move bit 5 of the
     * shift count into the sign position and smear it, producing
     * all-ones exactly when the count is 32 or more.
     */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Logical word shift: operate on the zero-extended low word. */
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2995 
2996 #if defined(TARGET_PPC64)
2997 /* sld & sld. */
2998 static void gen_sld(DisasContext *ctx)
2999 {
3000     TCGv t0, t1;
3001 
3002     t0 = tcg_temp_new();
3003     /* AND rS with a mask that is 0 when rB >= 0x40 */
3004     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3005     tcg_gen_sari_tl(t0, t0, 0x3f);
3006     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3007     t1 = tcg_temp_new();
3008     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3009     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3010     tcg_temp_free(t1);
3011     tcg_temp_free(t0);
3012     if (unlikely(Rc(ctx->opcode) != 0)) {
3013         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3014     }
3015 }
3016 
3017 /* srad & srad. */
3018 static void gen_srad(DisasContext *ctx)
3019 {
3020     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3021                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3022     if (unlikely(Rc(ctx->opcode) != 0)) {
3023         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3024     }
3025 }
/* sradi & sradi.: shift right algebraic dword immediate. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    /* n supplies the high bit of the split 6-bit shift count. */
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* Zero shift: plain move; CA is clear. */
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* Collect the bits that will be shifted out. */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* Smear the sign bit: CA can only be set for negative sources. */
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        /* Collapse any surviving shifted-out bits to a 0/1 flag. */
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
3055 
/* sradi with the split shift-count bit clear (shift 0..31). */
static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}
3060 
/* sradi with the split shift-count bit set (shift 32..63). */
static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}
3065 
3066 /* extswsli & extswsli. */
3067 static inline void gen_extswsli(DisasContext *ctx, int n)
3068 {
3069     int sh = SH(ctx->opcode) + (n << 5);
3070     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3071     TCGv src = cpu_gpr[rS(ctx->opcode)];
3072 
3073     tcg_gen_ext32s_tl(dst, src);
3074     tcg_gen_shli_tl(dst, dst, sh);
3075     if (unlikely(Rc(ctx->opcode) != 0)) {
3076         gen_set_Rc0(ctx, dst);
3077     }
3078 }
3079 
/* extswsli with the split shift-count bit clear (shift 0..31). */
static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}
3084 
/* extswsli with the split shift-count bit set (shift 32..63). */
static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}
3089 
3090 /* srd & srd. */
3091 static void gen_srd(DisasContext *ctx)
3092 {
3093     TCGv t0, t1;
3094 
3095     t0 = tcg_temp_new();
3096     /* AND rS with a mask that is 0 when rB >= 0x40 */
3097     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3098     tcg_gen_sari_tl(t0, t0, 0x3f);
3099     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3100     t1 = tcg_temp_new();
3101     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3102     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3103     tcg_temp_free(t1);
3104     tcg_temp_free(t0);
3105     if (unlikely(Rc(ctx->opcode) != 0)) {
3106         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3107     }
3108 }
3109 #endif
3110 
3111 /***                           Addressing modes                            ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    /* maskl clears low displacement bits (e.g. DS/DQ-form encodings). */
    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: effective addresses wrap at 32 bits. */
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        /* Zero displacement: EA is rA (possibly truncated). */
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}
3137 
3138 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3139 {
3140     if (rA(ctx->opcode) == 0) {
3141         if (NARROW_MODE(ctx)) {
3142             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3143         } else {
3144             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3145         }
3146     } else {
3147         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3148         if (NARROW_MODE(ctx)) {
3149             tcg_gen_ext32u_tl(EA, EA);
3150         }
3151     }
3152 }
3153 
3154 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3155 {
3156     if (rA(ctx->opcode) == 0) {
3157         tcg_gen_movi_tl(EA, 0);
3158     } else if (NARROW_MODE(ctx)) {
3159         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3160     } else {
3161         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3162     }
3163 }
3164 
/* ret = arg1 + val, truncated to 32 bits in narrow (32-bit) mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}
3173 
/* Raise an alignment interrupt for insns unsupported in little-endian
 * mode; opcode bits are folded into the error code for the handler. */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}
3179 
3180 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3181 {
3182     TCGv ea = tcg_temp_new();
3183     if (ra) {
3184         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3185     } else {
3186         tcg_gen_mov_tl(ea, displ);
3187     }
3188     if (NARROW_MODE(ctx)) {
3189         tcg_gen_ext32u_tl(ea, ea);
3190     }
3191     return ea;
3192 }
3193 
/***                             Integer load                              ***/
/* Memory op carrying the guest's current default byte order. */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* Memory op with the opposite of the guest's default byte order. */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit a target_ulong-sized load helper gen_qemu_<ldop>(). */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

/* Byte-reversed loads for the l{h,w}brx instructions. */
GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit load helper gen_qemu_<ldop>_i64(). */
#define GEN_QEMU_LOAD_64(ldop, op)                                  \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
                                             TCGv_i64 val,          \
                                             TCGv addr)             \
{                                                                   \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
}

GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
/* Byte-reversed 64-bit load for ldbrx. */
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
#endif
3232 
/* Emit a target_ulong-sized store helper gen_qemu_<stop>(). */
#define GEN_QEMU_STORE_TL(stop, op)                                     \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
}

#if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
#endif
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

/* Byte-reversed stores for the st{h,w}brx instructions. */
GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit store helper gen_qemu_<stop>_i64(). */
#define GEN_QEMU_STORE_64(stop, op)                               \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
                                              TCGv_i64 val,       \
                                              TCGv addr)          \
{                                                                 \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
}

GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
/* Byte-reversed 64-bit store for stdbrx. */
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
#endif
3266 
/* Emit gen_<name>x(): register-indexed load into gpr[rD], with an
 * optional privilege check (chk). */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk(ctx);                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Unchecked variant. */
#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Hypervisor-real-mode-checked variant (cache-inhibited loads). */
#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Emit gen_<name>epx(): supervisor-only "external PID" load which
 * translates through the EPID MMU context (PPC_TLB_EPID_LOAD). */
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
static void glue(gen_, name##epx)(DisasContext *ctx)                          \
{                                                                             \
    TCGv EA;                                                                  \
    CHK_SV(ctx);                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
    tcg_temp_free(EA);                                                        \
}

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif

#if defined(TARGET_PPC64)
/* CI load/store variants */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
3311 
/***                              Integer store                            ***/
/* Emit gen_<name>x(): register-indexed store of gpr[rS], with an
 * optional privilege check (chk). */
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk(ctx);                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}
/* Unchecked variant. */
#define GEN_STX(name, stop, opc2, opc3, type)                                 \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Hypervisor-real-mode-checked variant (cache-inhibited stores). */
#define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3329 
/*
 * Emit gen_<name>epx(): supervisor-only "external PID" store which
 * translates through the EPID MMU context (PPC_TLB_EPID_STORE).
 * The source register is decoded with rS() for consistency with the
 * other store macros; rS and rD name the same opcode bit-field.
 */
#define GEN_STEPX(name, stop, opc2, opc3)                                     \
static void glue(gen_, name##epx)(DisasContext *ctx)                          \
{                                                                             \
    TCGv EA;                                                                  \
    CHK_SV(ctx);                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_qemu_st_tl(                                                       \
        cpu_gpr[rS(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
    tcg_temp_free(EA);                                                        \
}
3342 
GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
#endif

#if defined(TARGET_PPC64)
/* Cache-inhibited store variants (hypervisor real mode only). */
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
/***                Integer load and store with byte reverse               ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3375 
3376 /***                    Integer load and store multiple                    ***/
3377 
3378 /* lmw */
3379 static void gen_lmw(DisasContext *ctx)
3380 {
3381     TCGv t0;
3382     TCGv_i32 t1;
3383 
3384     if (ctx->le_mode) {
3385         gen_align_no_le(ctx);
3386         return;
3387     }
3388     gen_set_access_type(ctx, ACCESS_INT);
3389     t0 = tcg_temp_new();
3390     t1 = tcg_const_i32(rD(ctx->opcode));
3391     gen_addr_imm_index(ctx, t0, 0);
3392     gen_helper_lmw(cpu_env, t0, t1);
3393     tcg_temp_free(t0);
3394     tcg_temp_free_i32(t1);
3395 }
3396 
3397 /* stmw */
3398 static void gen_stmw(DisasContext *ctx)
3399 {
3400     TCGv t0;
3401     TCGv_i32 t1;
3402 
3403     if (ctx->le_mode) {
3404         gen_align_no_le(ctx);
3405         return;
3406     }
3407     gen_set_access_type(ctx, ACCESS_INT);
3408     t0 = tcg_temp_new();
3409     t1 = tcg_const_i32(rS(ctx->opcode));
3410     gen_addr_imm_index(ctx, t0, 0);
3411     gen_helper_stmw(cpu_env, t0, t1);
3412     tcg_temp_free(t0);
3413     tcg_temp_free_i32(t1);
3414 }
3415 
3416 /***                    Integer load and store strings                     ***/
3417 
/* lswi */
/*
 * PowerPC32 specification says we must generate an exception if rA is
 * in the range of registers to be loaded.  On the other hand, IBM says
 * this is valid, but rA won't be loaded.  For now, I'll follow the
 * spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    /* NB=0 encodes a 32-byte transfer. */
    if (nb == 0) {
        nb = 32;
    }
    /* Number of registers touched, 4 bytes per register. */
    nr = DIV_ROUND_UP(nb, 4);
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
3456 
3457 /* lswx */
3458 static void gen_lswx(DisasContext *ctx)
3459 {
3460     TCGv t0;
3461     TCGv_i32 t1, t2, t3;
3462 
3463     if (ctx->le_mode) {
3464         gen_align_no_le(ctx);
3465         return;
3466     }
3467     gen_set_access_type(ctx, ACCESS_INT);
3468     t0 = tcg_temp_new();
3469     gen_addr_reg_index(ctx, t0);
3470     t1 = tcg_const_i32(rD(ctx->opcode));
3471     t2 = tcg_const_i32(rA(ctx->opcode));
3472     t3 = tcg_const_i32(rB(ctx->opcode));
3473     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3474     tcg_temp_free(t0);
3475     tcg_temp_free_i32(t1);
3476     tcg_temp_free_i32(t2);
3477     tcg_temp_free_i32(t3);
3478 }
3479 
3480 /* stswi */
3481 static void gen_stswi(DisasContext *ctx)
3482 {
3483     TCGv t0;
3484     TCGv_i32 t1, t2;
3485     int nb = NB(ctx->opcode);
3486 
3487     if (ctx->le_mode) {
3488         gen_align_no_le(ctx);
3489         return;
3490     }
3491     gen_set_access_type(ctx, ACCESS_INT);
3492     t0 = tcg_temp_new();
3493     gen_addr_register(ctx, t0);
3494     if (nb == 0) {
3495         nb = 32;
3496     }
3497     t1 = tcg_const_i32(nb);
3498     t2 = tcg_const_i32(rS(ctx->opcode));
3499     gen_helper_stsw(cpu_env, t0, t1, t2);
3500     tcg_temp_free(t0);
3501     tcg_temp_free_i32(t1);
3502     tcg_temp_free_i32(t2);
3503 }
3504 
3505 /* stswx */
3506 static void gen_stswx(DisasContext *ctx)
3507 {
3508     TCGv t0;
3509     TCGv_i32 t1, t2;
3510 
3511     if (ctx->le_mode) {
3512         gen_align_no_le(ctx);
3513         return;
3514     }
3515     gen_set_access_type(ctx, ACCESS_INT);
3516     t0 = tcg_temp_new();
3517     gen_addr_reg_index(ctx, t0);
3518     t1 = tcg_temp_new_i32();
3519     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3520     tcg_gen_andi_i32(t1, t1, 0x7F);
3521     t2 = tcg_const_i32(rS(ctx->opcode));
3522     gen_helper_stsw(cpu_env, t0, t1, t2);
3523     tcg_temp_free(t0);
3524     tcg_temp_free_i32(t1);
3525     tcg_temp_free_i32(t2);
3526 }
3527 
3528 /***                        Memory synchronisation                         ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;

    /*
     * eieio has complex semantics. It provides memory ordering between
     * operations in the set:
     * - loads from CI memory.
     * - stores to CI memory.
     * - stores to WT memory.
     *
     * It separately also orders memory for operations in the set:
     * - stores to cacheable memory.
     *
     * It also serializes instructions:
     * - dcbt and dcbst.
     *
     * It separately serializes:
     * - tlbie and tlbsync.
     *
     * And separately serializes:
     * - slbieg, slbiag, and slbsync.
     *
     * The end result is that CI memory ordering requires TCG_MO_ALL
     * and it is not possible to special-case more relaxed ordering for
     * cacheable accesses. TCG_BAR_SC is required to provide this
     * serialization.
     */

    /*
     * POWER9 has a eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->cia);
        } else {
            /* Store-forwarding barrier: only store-before-load ordering. */
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
3580 
#if !defined(CONFIG_USER_ONLY)
/*
 * Emit a conditional call to the TLB-flush helper: when
 * env->tlb_need_flush is non-zero, flush either this core's TLB or
 * (if global) every core's.  Only emitted for CPUs that use the lazy
 * TLB flush scheme.
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    /* Skip the helper call entirely when no flush is pending. */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User mode has no software-visible TLB to flush. */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif
3605 
/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    /* Full barrier for the context-synchronising semantics of isync. */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    /* End the TB so pending state (e.g. interrupts) is re-evaluated. */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
3619 
/* Access size in bytes encoded by a MemOp. */
#define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3621 
3622 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3623 {
3624     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3625     TCGv t0 = tcg_temp_new();
3626 
3627     gen_set_access_type(ctx, ACCESS_RES);
3628     gen_addr_reg_index(ctx, t0);
3629     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3630     tcg_gen_mov_tl(cpu_reserve, t0);
3631     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3632     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3633     tcg_temp_free(t0);
3634 }
3635 
/* Emit a load-and-reserve handler named gen_<name> for the given MemOp. */
#define LARX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_load_locked(ctx, memop);           \
}

/* lbarx, lharx, lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))
3646 
/*
 * Non-parallel helper for the lwat/ldat "fetch and increment/decrement
 * bounded/equal" variants: load mem(EA) and mem(EA+s); when
 * "mem(EA) cond mem(EA+s)" holds, store mem(EA)+addend back at EA and
 * return the old value in RT, otherwise leave memory unchanged and
 * return 1 << (s*8-1) in RT.
 */
static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
                                      TCGv EA, TCGCond cond, int addend)
{
    TCGv t = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv u = tcg_temp_new();

    tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
    tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
    tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
    tcg_gen_addi_tl(u, t, addend);

    /* E.g. for fetch and increment bounded... */
    /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
    tcg_gen_movcond_tl(cond, u, t, t2, u, t);
    tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);

    /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
    tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
    tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);

    tcg_temp_free(t);
    tcg_temp_free(t2);
    tcg_temp_free(u);
}
3672 
/*
 * Common code for lwat/ldat: perform the atomic memory operation
 * selected by the FC field on the storage addressed via RA, with RT as
 * destination and RT+1 (plus RT+2 for compare-and-swap-not-equal) as
 * sources.  Reserved FC values raise a DSI.  Forms without a matching
 * TCG atomic primitive restart under an exclusive lock when running in
 * parallel mode.
 */
static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    int rt = rD(ctx->opcode);
    bool need_serial;
    TCGv src, dst;

    gen_addr_register(ctx, EA);
    dst = cpu_gpr[rt];
    src = cpu_gpr[(rt + 1) & 31];

    need_serial = false;
    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* Fetch and add */
        tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* Fetch and xor */
        tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Fetch and or */
        tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* Fetch and 'and' */
        tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Fetch and max unsigned */
        tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Fetch and max signed */
        tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Fetch and min unsigned */
        tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Fetch and min signed */
        tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 8: /* Swap */
        tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
        break;

    case 16: /* Compare and swap not equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            /* Non-parallel: if mem(EA) != RT+1, store RT+2; RT = old value. */
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
            if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                /* 32-bit access: compare only the low word of RT+1. */
                tcg_gen_ext32u_tl(t1, src);
            }
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock.  */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}
3773 
/* lwat: Load Word ATomic; the FC field selects the operation. */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}
3778 
#ifdef TARGET_PPC64
/* ldat: Load Doubleword ATomic; the FC field selects the operation. */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3785 
/*
 * Common code for stwat/stdat: perform the atomic read-modify-write
 * selected by the FC field at the address taken from RA, with RS as the
 * source operand; the value fetched from memory is discarded.  Reserved
 * FC values raise a DSI.
 */
static void gen_st_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin  */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            /*
             * If mem(EA) == mem(EA+s), store src to both locations;
             * otherwise write the original values back (no change).
             */
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}
3856 
/* stwat: Store Word ATomic; the FC field selects the operation. */
static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}
3861 
#ifdef TARGET_PPC64
/* stdat: Store Doubleword ATomic; the FC field selects the operation. */
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3868 
/*
 * Common code for the store-conditional instructions (stbcx., sthcx.,
 * stwcx., stdcx.): the store succeeds only if EA matches the reservation
 * address and memory still holds the reserved value (checked via
 * cmpxchg).  CR0 is set to SO plus EQ on success, and the reservation is
 * cleared in all cases.
 */
static void gen_conditional_store(DisasContext *ctx, MemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    /* Fail immediately when EA differs from the reservation address. */
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    /* CR0 = SO | (EQ if the old value still matched the reservation). */
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /*
     * Address mismatch implies failure.  But we still need to provide
     * the memory barrier semantics of the instruction.
     */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    /* CR0 = SO only (store failed). */
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Clear the reservation either way. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}
3904 
/* Emit a store-conditional handler named gen_<name> for the given MemOp. */
#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

/* stbcx., sthcx., stwcx. */
STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))
3914 
3915 #if defined(TARGET_PPC64)
3916 /* ldarx */
3917 LARX(ldarx, DEF_MEMOP(MO_UQ))
3918 /* stdcx. */
3919 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3920 
/* lqarx: Load Quadword And Reserve Indexed. */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* RD must be even and must not overlap RA or RB (invalid forms). */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode.  */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            /* Single atomic 16-byte load through a helper. */
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* The helper returns the second half through env->retxh. */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);

    /* Record the reserved 16-byte value for the matching stqcx. */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}
3978 
/* stqcx.: Store Quadword Conditional Indexed and record. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* RS must be even (invalid form otherwise). */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode.  */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            /* The helpers perform the 16-byte cmpxchg and set CR0. */
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Fail when EA differs from the reservation address. */
        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        /* Fail unless both 8-byte halves still hold the reserved values. */
        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        /* CR0 = SO | EQ on success. */
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        /* CR0 = SO only on failure. */
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        /* Clear the reservation either way. */
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
4055 #endif /* defined(TARGET_PPC64) */
4056 
/* sync */
static void gen_sync(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;
    uint32_t l = (ctx->opcode >> 21) & 3;

    /* L == 1 is lwsync: a lighter barrier without store-load ordering. */
    if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
        bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
    }

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
4081 
/* wait: decode the WC/PL fields per ISA level, then halt or no-op. */
static void gen_wait(DisasContext *ctx)
{
    uint32_t wc;

    if (ctx->insns_flags & PPC_WAIT) {
        /* v2.03-v2.07 define an older incompatible 'wait' encoding. */

        if (ctx->insns_flags2 & PPC2_PM_ISA206) {
            /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
            wc = WC(ctx->opcode);
        } else {
            wc = 0;
        }

    } else if (ctx->insns_flags2 & PPC2_ISA300) {
        /* v3.0 defines a new 'wait' encoding. */
        wc = WC(ctx->opcode);
        if (ctx->insns_flags2 & PPC2_ISA310) {
            uint32_t pl = PL(ctx->opcode);

            /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
            if (wc == 3) {
                gen_invalid(ctx);
                return;
            }

            /* PL 1-3 are reserved. If WC=2 then the insn is treated as noop. */
            if (pl > 0 && wc != 2) {
                gen_invalid(ctx);
                return;
            }

        } else { /* ISA300 */
            /* WC 1-3 are reserved */
            if (wc > 0) {
                gen_invalid(ctx);
                return;
            }
        }

    } else {
        warn_report("wait instruction decoded with wrong ISA flags.");
        gen_invalid(ctx);
        return;
    }

    /*
     * wait without WC field or with WC=0 waits for an exception / interrupt
     * to occur.
     */
    if (wc == 0) {
        TCGv_i32 t0 = tcg_const_i32(1);
        /*
         * Set CPUState.halted: env lives at a fixed offset within
         * PowerPCCPU, so step back from env to reach the CPUState field.
         */
        tcg_gen_st_i32(t0, cpu_env,
                       -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
        tcg_temp_free_i32(t0);
        /* Stop translation, as the CPU is supposed to sleep from now */
        gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
    }

    /*
     * Other wait types must not just wait until an exception occurs because
     * ignoring their other wake-up conditions could cause a hang.
     *
     * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
     * no-ops.
     *
     * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
     *
     * wc=2 waits for an implementation-specific condition, such could be
     * always true, so it can be implemented as a no-op.
     *
     * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
     *
     * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
     * Reservation-loss may have implementation-specific conditions, so it
     * can be implemented as a no-op.
     *
     * wc=2 waits for an exception or an amount of time to pass. This
     * amount is implementation-specific so it can be implemented as a
     * no-op.
     *
     * ISA v3.1 allows for execution to resume "in the rare case of
     * an implementation-dependent event", so in any case software must
     * not depend on the architected resumption condition to become
     * true, so no-op implementations should be architecturally correct
     * (if suboptimal).
     */
}
4171 
4172 #if defined(TARGET_PPC64)
4173 static void gen_doze(DisasContext *ctx)
4174 {
4175 #if defined(CONFIG_USER_ONLY)
4176     GEN_PRIV(ctx);
4177 #else
4178     TCGv_i32 t;
4179 
4180     CHK_HV(ctx);
4181     t = tcg_const_i32(PPC_PM_DOZE);
4182     gen_helper_pminsn(cpu_env, t);
4183     tcg_temp_free_i32(t);
4184     /* Stop translation, as the CPU is supposed to sleep from now */
4185     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4186 #endif /* defined(CONFIG_USER_ONLY) */
4187 }
4188 
4189 static void gen_nap(DisasContext *ctx)
4190 {
4191 #if defined(CONFIG_USER_ONLY)
4192     GEN_PRIV(ctx);
4193 #else
4194     TCGv_i32 t;
4195 
4196     CHK_HV(ctx);
4197     t = tcg_const_i32(PPC_PM_NAP);
4198     gen_helper_pminsn(cpu_env, t);
4199     tcg_temp_free_i32(t);
4200     /* Stop translation, as the CPU is supposed to sleep from now */
4201     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4202 #endif /* defined(CONFIG_USER_ONLY) */
4203 }
4204 
4205 static void gen_stop(DisasContext *ctx)
4206 {
4207 #if defined(CONFIG_USER_ONLY)
4208     GEN_PRIV(ctx);
4209 #else
4210     TCGv_i32 t;
4211 
4212     CHK_HV(ctx);
4213     t = tcg_const_i32(PPC_PM_STOP);
4214     gen_helper_pminsn(cpu_env, t);
4215     tcg_temp_free_i32(t);
4216     /* Stop translation, as the CPU is supposed to sleep from now */
4217     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4218 #endif /* defined(CONFIG_USER_ONLY) */
4219 }
4220 
4221 static void gen_sleep(DisasContext *ctx)
4222 {
4223 #if defined(CONFIG_USER_ONLY)
4224     GEN_PRIV(ctx);
4225 #else
4226     TCGv_i32 t;
4227 
4228     CHK_HV(ctx);
4229     t = tcg_const_i32(PPC_PM_SLEEP);
4230     gen_helper_pminsn(cpu_env, t);
4231     tcg_temp_free_i32(t);
4232     /* Stop translation, as the CPU is supposed to sleep from now */
4233     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4234 #endif /* defined(CONFIG_USER_ONLY) */
4235 }
4236 
4237 static void gen_rvwinkle(DisasContext *ctx)
4238 {
4239 #if defined(CONFIG_USER_ONLY)
4240     GEN_PRIV(ctx);
4241 #else
4242     TCGv_i32 t;
4243 
4244     CHK_HV(ctx);
4245     t = tcg_const_i32(PPC_PM_RVWINKLE);
4246     gen_helper_pminsn(cpu_env, t);
4247     tcg_temp_free_i32(t);
4248     /* Stop translation, as the CPU is supposed to sleep from now */
4249     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4250 #endif /* defined(CONFIG_USER_ONLY) */
4251 }
4252 #endif /* #if defined(TARGET_PPC64) */
4253 
/* Record nip in CFAR (Come-From Address Register) when the CPU has one. */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar) {
        tcg_gen_movi_tl(cpu_cfar, nip);
    }
#endif
}
4262 
#if defined(TARGET_PPC64)
/*
 * Add ctx->base.num_insns to the PMU instruction counters at the end of
 * a TB.  When only PMC5/6 are counting, PMC5 is bumped inline with an
 * optional overflow check; otherwise the insns_inc() helper updates all
 * counters.
 */
static void pmu_count_insns(DisasContext *ctx)
{
    /*
     * Do not bother calling the helper if the PMU isn't counting
     * instructions.
     */
    if (!ctx->pmu_insn_cnt) {
        return;
    }

 #if !defined(CONFIG_USER_ONLY)
    TCGLabel *l;
    TCGv t0;

    /*
     * The PMU insns_inc() helper stops the internal PMU timer if a
     * counter overflows happens. In that case, if the guest is
     * running with icount and we do not handle it beforehand,
     * the helper can trigger a 'bad icount read'.
     */
    gen_icount_io_start(ctx);

    /* Avoid helper calls when only PMC5-6 are enabled. */
    if (!ctx->pmc_other) {
        l = gen_new_label();
        t0 = tcg_temp_new();

        gen_load_spr(t0, SPR_POWER_PMC5);
        tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
        gen_store_spr(SPR_POWER_PMC5, t0);
        /* Check for overflow, if it's enabled */
        if (ctx->mmcr0_pmcjce) {
            /* Skip the overflow helper while PMC5 is below the threshold. */
            tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
            gen_helper_handle_pmc5_overflow(cpu_env);
        }

        gen_set_label(l);
        tcg_temp_free(t0);
    } else {
        gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
    }
  #else
    /*
     * User mode can read (but not write) PMC5 and start/stop
     * the PMU via MMCR0_FC. In this case just increment
     * PMC5 with base.num_insns.
     */
    TCGv t0 = tcg_temp_new();

    gen_load_spr(t0, SPR_POWER_PMC5);
    tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
    gen_store_spr(SPR_POWER_PMC5, t0);

    tcg_temp_free(t0);
  #endif /* #if !defined(CONFIG_USER_ONLY) */
}
#else
/* Without TARGET_PPC64 there is no PMU instruction counting to do. */
static void pmu_count_insns(DisasContext *ctx)
{
    return;
}
#endif /* #if defined(TARGET_PPC64) */
4326 
/* True when a direct goto_tb chain to dest is permitted from this TB. */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
4331 
4332 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4333 {
4334     if (unlikely(ctx->singlestep_enabled)) {
4335         gen_debug_exception(ctx);
4336     } else {
4337         /*
4338          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4339          * CF_NO_GOTO_PTR is set. Count insns now.
4340          */
4341         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4342             pmu_count_insns(ctx);
4343         }
4344 
4345         tcg_gen_lookup_and_goto_ptr();
4346     }
4347 }
4348 
4349 /***                                Branch                                 ***/
/*
 * Jump to dest, using a direct goto_tb chain when permitted and the
 * lookup-and-goto-ptr path otherwise.  NIP is masked with ~3 since
 * instruction addresses are word-aligned.
 */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        /* Leaving the TB for good: account the executed instructions. */
        pmu_count_insns(ctx);
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}
4365 
4366 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4367 {
4368     if (NARROW_MODE(ctx)) {
4369         nip = (uint32_t)nip;
4370     }
4371     tcg_gen_movi_tl(cpu_lr, nip);
4372 }
4373 
/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    /* sign extend LI: 0x02000000 is the sign bit of the 26-bit field */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        /* relative branch: displacement from the current address */
        target = ctx->cia + li;
    } else {
        /* absolute branch */
        target = li;
    }
    if (LK(ctx->opcode)) {
        /* bl/bla: save the return address in LR */
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->cia);
    gen_goto_tb(ctx, 0, target);
    ctx->base.is_jmp = DISAS_NORETURN;
}
4394 
4395 #define BCOND_IM  0
4396 #define BCOND_LR  1
4397 #define BCOND_CTR 2
4398 #define BCOND_TAR 3
4399 
4400 static void gen_bcond(DisasContext *ctx, int type)
4401 {
4402     uint32_t bo = BO(ctx->opcode);
4403     TCGLabel *l1;
4404     TCGv target;
4405 
4406     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4407         target = tcg_temp_local_new();
4408         if (type == BCOND_CTR) {
4409             tcg_gen_mov_tl(target, cpu_ctr);
4410         } else if (type == BCOND_TAR) {
4411             gen_load_spr(target, SPR_TAR);
4412         } else {
4413             tcg_gen_mov_tl(target, cpu_lr);
4414         }
4415     } else {
4416         target = NULL;
4417     }
4418     if (LK(ctx->opcode)) {
4419         gen_setlr(ctx, ctx->base.pc_next);
4420     }
4421     l1 = gen_new_label();
4422     if ((bo & 0x4) == 0) {
4423         /* Decrement and test CTR */
4424         TCGv temp = tcg_temp_new();
4425 
4426         if (type == BCOND_CTR) {
4427             /*
4428              * All ISAs up to v3 describe this form of bcctr as invalid but
4429              * some processors, ie. 64-bit server processors compliant with
4430              * arch 2.x, do implement a "test and decrement" logic instead,
4431              * as described in their respective UMs. This logic involves CTR
4432              * to act as both the branch target and a counter, which makes
4433              * it basically useless and thus never used in real code.
4434              *
4435              * This form was hence chosen to trigger extra micro-architectural
4436              * side-effect on real HW needed for the Spectre v2 workaround.
4437              * It is up to guests that implement such workaround, ie. linux, to
4438              * use this form in a way it just triggers the side-effect without
4439              * doing anything else harmful.
4440              */
4441             if (unlikely(!is_book3s_arch2x(ctx))) {
4442                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4443                 tcg_temp_free(temp);
4444                 tcg_temp_free(target);
4445                 return;
4446             }
4447 
4448             if (NARROW_MODE(ctx)) {
4449                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4450             } else {
4451                 tcg_gen_mov_tl(temp, cpu_ctr);
4452             }
4453             if (bo & 0x2) {
4454                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4455             } else {
4456                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4457             }
4458             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4459         } else {
4460             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4461             if (NARROW_MODE(ctx)) {
4462                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4463             } else {
4464                 tcg_gen_mov_tl(temp, cpu_ctr);
4465             }
4466             if (bo & 0x2) {
4467                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4468             } else {
4469                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4470             }
4471         }
4472         tcg_temp_free(temp);
4473     }
4474     if ((bo & 0x10) == 0) {
4475         /* Test CR */
4476         uint32_t bi = BI(ctx->opcode);
4477         uint32_t mask = 0x08 >> (bi & 0x03);
4478         TCGv_i32 temp = tcg_temp_new_i32();
4479 
4480         if (bo & 0x8) {
4481             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4482             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4483         } else {
4484             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4485             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4486         }
4487         tcg_temp_free_i32(temp);
4488     }
4489     gen_update_cfar(ctx, ctx->cia);
4490     if (type == BCOND_IM) {
4491         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4492         if (likely(AA(ctx->opcode) == 0)) {
4493             gen_goto_tb(ctx, 0, ctx->cia + li);
4494         } else {
4495             gen_goto_tb(ctx, 0, li);
4496         }
4497     } else {
4498         if (NARROW_MODE(ctx)) {
4499             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4500         } else {
4501             tcg_gen_andi_tl(cpu_nip, target, ~3);
4502         }
4503         gen_lookup_and_goto_ptr(ctx);
4504         tcg_temp_free(target);
4505     }
4506     if ((bo & 0x14) != 0x14) {
4507         /* fallthrough case */
4508         gen_set_label(l1);
4509         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4510     }
4511     ctx->base.is_jmp = DISAS_NORETURN;
4512 }
4513 
/* bc: branch conditional to an immediate displacement (BCOND_IM form) */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}
4518 
/* bcctr: branch conditional to the address held in CTR (BCOND_CTR form) */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}
4523 
/* bclr: branch conditional to the address held in LR (BCOND_LR form) */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}
4528 
/* bctar: branch conditional to the address held in TAR (BCOND_TAR form) */
static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}
4533 
4534 /***                      Condition register logical                       ***/
/*
 * Emit a CR-bit logical operation: crbD <- crbA <op> crbB.
 * CR is stored as eight 4-bit fields (cpu_crf[0..7]); each source bit is
 * shifted into the bit position of the destination bit within its field
 * before applying the 32-bit tcg_op, then masked back into crbD's field.
 * The 'opc' parameter is not referenced by the expansion.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
4565 
/*
 * Instantiate the CR logical instructions.  The third argument documents
 * each instruction's minor opcode; it is not used by the macro expansion.
 */
/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4582 
/* mcrf */
static void gen_mcrf(DisasContext *ctx)
{
    /* Copy one 4-bit CR field to another; a single field move suffices */
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
4588 
4589 /***                           System linkage                              ***/
4590 
/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /*
     * This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfi(cpu_env);
    /* The helper may change MSR, so the TB must end here */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4613 
4614 #if defined(TARGET_PPC64)
/* rfid (supervisor only): return from interrupt, 64-bit form */
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfid(cpu_env);
    /* The helper may change MSR, so the TB must end here */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4628 
4629 #if !defined(CONFIG_USER_ONLY)
/* rfscv (supervisor only): return from system call vectored */
static void gen_rfscv(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    /*
     * NOTE(review): this branch is unreachable -- the enclosing
     * #if !defined(CONFIG_USER_ONLY) already excludes user-only builds.
     * Kept for symmetry with the other rf* handlers.
     */
    GEN_PRIV(ctx);
#else
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfscv(cpu_env);
    /* The helper may change MSR, so the TB must end here */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4643 #endif
4644 
/* hrfid (hypervisor only): return from hypervisor interrupt */
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state */
    CHK_HV(ctx);
    gen_helper_hrfid(cpu_env);
    /* The helper may change MSR, so the TB must end here */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4656 #endif
4657 
4658 /* sc */
4659 #if defined(CONFIG_USER_ONLY)
4660 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4661 #else
4662 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4663 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4664 #endif
4665 static void gen_sc(DisasContext *ctx)
4666 {
4667     uint32_t lev;
4668 
4669     lev = (ctx->opcode >> 5) & 0x7F;
4670     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4671 }
4672 
4673 #if defined(TARGET_PPC64)
4674 #if !defined(CONFIG_USER_ONLY)
static void gen_scv(DisasContext *ctx)
{
    /* LEV selects the system-call vector; passed to the helper as is */
    uint32_t lev = (ctx->opcode >> 5) & 0x7F;

    /* Set the PC back to the faulting instruction. */
    gen_update_nip(ctx, ctx->cia);
    gen_helper_scv(cpu_env, tcg_constant_i32(lev));

    ctx->base.is_jmp = DISAS_NORETURN;
}
4685 #endif
4686 #endif
4687 
4688 /***                                Trap                                   ***/
4689 
4690 /* Check for unconditional traps (always or never) */
4691 static bool check_unconditional_trap(DisasContext *ctx)
4692 {
4693     /* Trap never */
4694     if (TO(ctx->opcode) == 0) {
4695         return true;
4696     }
4697     /* Trap always */
4698     if (TO(ctx->opcode) == 31) {
4699         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4700         return true;
4701     }
4702     return false;
4703 }
4704 
4705 /* tw */
4706 static void gen_tw(DisasContext *ctx)
4707 {
4708     TCGv_i32 t0;
4709 
4710     if (check_unconditional_trap(ctx)) {
4711         return;
4712     }
4713     t0 = tcg_const_i32(TO(ctx->opcode));
4714     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4715                   t0);
4716     tcg_temp_free_i32(t0);
4717 }
4718 
4719 /* twi */
4720 static void gen_twi(DisasContext *ctx)
4721 {
4722     TCGv t0;
4723     TCGv_i32 t1;
4724 
4725     if (check_unconditional_trap(ctx)) {
4726         return;
4727     }
4728     t0 = tcg_const_tl(SIMM(ctx->opcode));
4729     t1 = tcg_const_i32(TO(ctx->opcode));
4730     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4731     tcg_temp_free(t0);
4732     tcg_temp_free_i32(t1);
4733 }
4734 
4735 #if defined(TARGET_PPC64)
4736 /* td */
4737 static void gen_td(DisasContext *ctx)
4738 {
4739     TCGv_i32 t0;
4740 
4741     if (check_unconditional_trap(ctx)) {
4742         return;
4743     }
4744     t0 = tcg_const_i32(TO(ctx->opcode));
4745     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4746                   t0);
4747     tcg_temp_free_i32(t0);
4748 }
4749 
4750 /* tdi */
4751 static void gen_tdi(DisasContext *ctx)
4752 {
4753     TCGv t0;
4754     TCGv_i32 t1;
4755 
4756     if (check_unconditional_trap(ctx)) {
4757         return;
4758     }
4759     t0 = tcg_const_tl(SIMM(ctx->opcode));
4760     t1 = tcg_const_i32(TO(ctx->opcode));
4761     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4762     tcg_temp_free(t0);
4763     tcg_temp_free_i32(t1);
4764 }
4765 #endif
4766 
4767 /***                          Processor control                            ***/
4768 
/* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    /* dst aliases the CR field, so the write order below matters */
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* Pack SO, OV, CA into CR field bits 3, 2 and 1 (bit 0 stays 0) */
    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    /* The copied XER bits are cleared afterwards */
    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
4791 
4792 #ifdef TARGET_PPC64
/* mcrxrx */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    /* CR field becomes OV:OV32:CA:CA32 */
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
4812 #endif
4813 
4814 /* mfcr mfocrf */
4815 static void gen_mfcr(DisasContext *ctx)
4816 {
4817     uint32_t crm, crn;
4818 
4819     if (likely(ctx->opcode & 0x00100000)) {
4820         crm = CRM(ctx->opcode);
4821         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4822             crn = ctz32(crm);
4823             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4824             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4825                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4826         }
4827     } else {
4828         TCGv_i32 t0 = tcg_temp_new_i32();
4829         tcg_gen_mov_i32(t0, cpu_crf[0]);
4830         tcg_gen_shli_i32(t0, t0, 4);
4831         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4832         tcg_gen_shli_i32(t0, t0, 4);
4833         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4834         tcg_gen_shli_i32(t0, t0, 4);
4835         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4836         tcg_gen_shli_i32(t0, t0, 4);
4837         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4838         tcg_gen_shli_i32(t0, t0, 4);
4839         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4840         tcg_gen_shli_i32(t0, t0, 4);
4841         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4842         tcg_gen_shli_i32(t0, t0, 4);
4843         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4844         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4845         tcg_temp_free_i32(t0);
4846     }
4847 }
4848 
/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    /* Supervisor-only; CHK_SV raises the privilege exception otherwise */
    CHK_SV(ctx);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}
4855 
/* mfspr */
/*
 * Common SPR-read path used by both mfspr and mftb.  The callback is
 * chosen per privilege level from the per-SPR table: user (uea), then
 * hypervisor (hea), then supervisor (oea) accessors.
 */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /*
             * This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->cia);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);

        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
4917 
/* mfspr: thin wrapper around the common SPR-read path */
static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
4922 
/* mftb */
/* Time base reads go through the same SPR callback dispatch as mfspr */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
4928 
4929 /* mtcrf mtocrf*/
4930 static void gen_mtcrf(DisasContext *ctx)
4931 {
4932     uint32_t crm, crn;
4933 
4934     crm = CRM(ctx->opcode);
4935     if (likely((ctx->opcode & 0x00100000))) {
4936         if (crm && ((crm & (crm - 1)) == 0)) {
4937             TCGv_i32 temp = tcg_temp_new_i32();
4938             crn = ctz32(crm);
4939             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4940             tcg_gen_shri_i32(temp, temp, crn * 4);
4941             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4942             tcg_temp_free_i32(temp);
4943         }
4944     } else {
4945         TCGv_i32 temp = tcg_temp_new_i32();
4946         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4947         for (crn = 0 ; crn < 8 ; crn++) {
4948             if (crm & (1 << crn)) {
4949                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4950                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4951             }
4952         }
4953         tcg_temp_free_i32(temp);
4954     }
4955 }
4956 
4957 /* mtmsr */
4958 #if defined(TARGET_PPC64)
/* mtmsrd: write MSR (64-bit form), supervisor only, Book3S arch 2.x */
static void gen_mtmsrd(DisasContext *ctx)
{
    if (unlikely(!is_book3s_arch2x(ctx))) {
        gen_invalid(ctx);
        return;
    }

    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    target_ulong mask;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);

    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsrd does not alter HV, S, ME, or LE */
        mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
                 (1ULL << MSR_HV));
        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new MSR = (rS & mask) | (old MSR & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif /* !defined(CONFIG_USER_ONLY) */
}
5005 #endif /* defined(TARGET_PPC64) */
5006 
/* mtmsr: write MSR (32-bit form), supervisor only */
static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    /* Only the low 32 MSR bits are writable by this form */
    target_ulong mask = 0xFFFFFFFF;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);
    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsr does not alter S, ME, or LE */
        mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));

        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new MSR = (rS & mask) | (old MSR & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif
}
5047 
/* mtspr */
/*
 * SPR write dispatch, mirroring gen_op_mfspr: pick the user/hypervisor/
 * supervisor write callback from the per-SPR table and invoke it.
 */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->cia);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);


        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
5104 
5105 #if defined(TARGET_PPC64)
/* setb */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_constant_i32(8);
    TCGv_i32 tm1 = tcg_constant_i32(-1);
    int crf = crfS(ctx->opcode);

    /*
     * CR field value >= 8 means its LT bit (0x8) is set: rD = -1.
     * Otherwise value >= 4 means its GT bit (0x4) is set: rD = 1.
     * Otherwise rD = 0.
     */
    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
}
5120 #endif
5121 
5122 /***                         Cache management                              ***/
5123 
5124 /* dcbf */
5125 static void gen_dcbf(DisasContext *ctx)
5126 {
5127     /* XXX: specification says this is treated as a load by the MMU */
5128     TCGv t0;
5129     gen_set_access_type(ctx, ACCESS_CACHE);
5130     t0 = tcg_temp_new();
5131     gen_addr_reg_index(ctx, t0);
5132     gen_qemu_ld8u(ctx, t0, t0);
5133     tcg_temp_free(t0);
5134 }
5135 
/* dcbfep (external PID dcbf) */
static void gen_dcbfep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    /* External-PID form is privileged */
    CHK_SV(ctx);
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load through the external-PID TLB index */
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}
5148 
/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv EA, val;

    CHK_SV(ctx);
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    /* Load then store back the same byte, so the value is unchanged */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
5169 
/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}
5181 
5182 /* dcbstep (dcbstep External PID version) */
5183 static void gen_dcbstep(DisasContext *ctx)
5184 {
5185     /* XXX: specification say this is treated as a load by the MMU */
5186     TCGv t0;
5187     gen_set_access_type(ctx, ACCESS_CACHE);
5188     t0 = tcg_temp_new();
5189     gen_addr_reg_index(ctx, t0);
5190     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5191     tcg_temp_free(t0);
5192 }
5193 
/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /*
     * Cache prefetch hint: interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5203 
/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /*
     * External-PID cache prefetch hint: interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5213 
/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /*
     * Cache prefetch-for-store hint: interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5223 
/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /*
     * External-PID prefetch-for-store hint: interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5233 
5234 /* dcbtls */
5235 static void gen_dcbtls(DisasContext *ctx)
5236 {
5237     /* Always fails locking the cache */
5238     TCGv t0 = tcg_temp_new();
5239     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5240     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5241     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5242     tcg_temp_free(t0);
5243 }
5244 
5245 /* dcbz */
5246 static void gen_dcbz(DisasContext *ctx)
5247 {
5248     TCGv tcgv_addr;
5249     TCGv_i32 tcgv_op;
5250 
5251     gen_set_access_type(ctx, ACCESS_CACHE);
5252     tcgv_addr = tcg_temp_new();
5253     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5254     gen_addr_reg_index(ctx, tcgv_addr);
5255     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5256     tcg_temp_free(tcgv_addr);
5257     tcg_temp_free_i32(tcgv_op);
5258 }
5259 
5260 /* dcbzep */
5261 static void gen_dcbzep(DisasContext *ctx)
5262 {
5263     TCGv tcgv_addr;
5264     TCGv_i32 tcgv_op;
5265 
5266     gen_set_access_type(ctx, ACCESS_CACHE);
5267     tcgv_addr = tcg_temp_new();
5268     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5269     gen_addr_reg_index(ctx, tcgv_addr);
5270     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5271     tcg_temp_free(tcgv_addr);
5272     tcg_temp_free_i32(tcgv_op);
5273 }
5274 
5275 /* dst / dstt */
5276 static void gen_dst(DisasContext *ctx)
5277 {
5278     if (rA(ctx->opcode) == 0) {
5279         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5280     } else {
5281         /* interpreted as no-op */
5282     }
5283 }
5284 
5285 /* dstst /dststt */
5286 static void gen_dstst(DisasContext *ctx)
5287 {
5288     if (rA(ctx->opcode) == 0) {
5289         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5290     } else {
5291         /* interpreted as no-op */
5292     }
5293 
5294 }
5295 
/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* Data stream stop: interpreted as no-op */
}
5301 
5302 /* icbi */
5303 static void gen_icbi(DisasContext *ctx)
5304 {
5305     TCGv t0;
5306     gen_set_access_type(ctx, ACCESS_CACHE);
5307     t0 = tcg_temp_new();
5308     gen_addr_reg_index(ctx, t0);
5309     gen_helper_icbi(cpu_env, t0);
5310     tcg_temp_free(t0);
5311 }
5312 
/* icbiep */
static void gen_icbiep(DisasContext *ctx)
{
    /* External-PID variant of icbi */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbiep(cpu_env, t0);
    tcg_temp_free(t0);
}
5323 
5324 /* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /*
     * Block-allocate hint: interpreted as no-op
     * XXX: specification say this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
5334 
5335 /***                    Segment register manipulation                      ***/
5336 /* Supervisor only: */
5337 
5338 /* mfsr */
5339 static void gen_mfsr(DisasContext *ctx)
5340 {
5341 #if defined(CONFIG_USER_ONLY)
5342     GEN_PRIV(ctx);
5343 #else
5344     TCGv t0;
5345 
5346     CHK_SV(ctx);
5347     t0 = tcg_const_tl(SR(ctx->opcode));
5348     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5349     tcg_temp_free(t0);
5350 #endif /* defined(CONFIG_USER_ONLY) */
5351 }
5352 
/* mfsrin */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    /* The SR number is the 4-bit field at bit 28 of rB */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
5368 
5369 /* mtsr */
5370 static void gen_mtsr(DisasContext *ctx)
5371 {
5372 #if defined(CONFIG_USER_ONLY)
5373     GEN_PRIV(ctx);
5374 #else
5375     TCGv t0;
5376 
5377     CHK_SV(ctx);
5378     t0 = tcg_const_tl(SR(ctx->opcode));
5379     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5380     tcg_temp_free(t0);
5381 #endif /* defined(CONFIG_USER_ONLY) */
5382 }
5383 
5384 /* mtsrin */
5385 static void gen_mtsrin(DisasContext *ctx)
5386 {
5387 #if defined(CONFIG_USER_ONLY)
5388     GEN_PRIV(ctx);
5389 #else
5390     TCGv t0;
5391     CHK_SV(ctx);
5392 
5393     t0 = tcg_temp_new();
5394     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5395     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5396     tcg_temp_free(t0);
5397 #endif /* defined(CONFIG_USER_ONLY) */
5398 }
5399 
5400 #if defined(TARGET_PPC64)
5401 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5402 
5403 /* mfsr */
5404 static void gen_mfsr_64b(DisasContext *ctx)
5405 {
5406 #if defined(CONFIG_USER_ONLY)
5407     GEN_PRIV(ctx);
5408 #else
5409     TCGv t0;
5410 
5411     CHK_SV(ctx);
5412     t0 = tcg_const_tl(SR(ctx->opcode));
5413     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5414     tcg_temp_free(t0);
5415 #endif /* defined(CONFIG_USER_ONLY) */
5416 }
5417 
5418 /* mfsrin */
5419 static void gen_mfsrin_64b(DisasContext *ctx)
5420 {
5421 #if defined(CONFIG_USER_ONLY)
5422     GEN_PRIV(ctx);
5423 #else
5424     TCGv t0;
5425 
5426     CHK_SV(ctx);
5427     t0 = tcg_temp_new();
5428     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5429     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5430     tcg_temp_free(t0);
5431 #endif /* defined(CONFIG_USER_ONLY) */
5432 }
5433 
5434 /* mtsr */
5435 static void gen_mtsr_64b(DisasContext *ctx)
5436 {
5437 #if defined(CONFIG_USER_ONLY)
5438     GEN_PRIV(ctx);
5439 #else
5440     TCGv t0;
5441 
5442     CHK_SV(ctx);
5443     t0 = tcg_const_tl(SR(ctx->opcode));
5444     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5445     tcg_temp_free(t0);
5446 #endif /* defined(CONFIG_USER_ONLY) */
5447 }
5448 
5449 /* mtsrin */
5450 static void gen_mtsrin_64b(DisasContext *ctx)
5451 {
5452 #if defined(CONFIG_USER_ONLY)
5453     GEN_PRIV(ctx);
5454 #else
5455     TCGv t0;
5456 
5457     CHK_SV(ctx);
5458     t0 = tcg_temp_new();
5459     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5460     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5461     tcg_temp_free(t0);
5462 #endif /* defined(CONFIG_USER_ONLY) */
5463 }
5464 
5465 #endif /* defined(TARGET_PPC64) */
5466 
5467 /***                      Lookaside buffer management                      ***/
5468 /* Optional & supervisor only: */
5469 
5470 /* tlbia */
/* tlbia: invalidate all TLB entries.  Hypervisor privileged (CHK_HV). */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_HV(ctx);

    gen_helper_tlbia(cpu_env);
#endif  /* defined(CONFIG_USER_ONLY) */
}
5481 
5482 /* tlbsync */
/*
 * tlbsync: order preceding TLB invalidations.  Privilege level depends on
 * the Guest Translation Shootdown Enable bit: supervisor when GTSE is set,
 * hypervisor otherwise.
 */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else

    if (ctx->gtse) {
        CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV(ctx); /* Else hypervisor privileged */
    }

    /*
     * On BookS (server) the flush is already handled by ptesync, which
     * makes tlbsync a no-op there; only BookE needs the flush check here.
     */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5501 
5502 /***                              External control                         ***/
5503 /* Optional: */
5504 
5505 /* eciwx */
/* eciwx: external control in word indexed - aligned 32-bit load from EA */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);   /* EA = (rA|0) + rB */
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}
5517 
5518 /* ecowx */
/* ecowx: external control out word indexed - aligned 32-bit store to EA */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);   /* EA = (rA|0) + rB */
    /* rD() and rS() decode the same field; this stores the source GPR */
    tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}
5530 
5531 /* 602 - 603 - G2 TLB management */
5532 
5533 /* tlbld */
5534 static void gen_tlbld_6xx(DisasContext *ctx)
5535 {
5536 #if defined(CONFIG_USER_ONLY)
5537     GEN_PRIV(ctx);
5538 #else
5539     CHK_SV(ctx);
5540     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5541 #endif /* defined(CONFIG_USER_ONLY) */
5542 }
5543 
5544 /* tlbli */
5545 static void gen_tlbli_6xx(DisasContext *ctx)
5546 {
5547 #if defined(CONFIG_USER_ONLY)
5548     GEN_PRIV(ctx);
5549 #else
5550     CHK_SV(ctx);
5551     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5552 #endif /* defined(CONFIG_USER_ONLY) */
5553 }
5554 
5555 /* BookE specific instructions */
5556 
5557 /* XXX: not implemented on 440 ? */
5558 static void gen_mfapidi(DisasContext *ctx)
5559 {
5560     /* XXX: TODO */
5561     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5562 }
5563 
5564 /* XXX: not implemented on 440 ? */
5565 static void gen_tlbiva(DisasContext *ctx)
5566 {
5567 #if defined(CONFIG_USER_ONLY)
5568     GEN_PRIV(ctx);
5569 #else
5570     TCGv t0;
5571 
5572     CHK_SV(ctx);
5573     t0 = tcg_temp_new();
5574     gen_addr_reg_index(ctx, t0);
5575     gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5576     tcg_temp_free(t0);
5577 #endif /* defined(CONFIG_USER_ONLY) */
5578 }
5579 
5580 /* All 405 MAC instructions are translated here */
5581 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5582                                         int ra, int rb, int rt, int Rc)
5583 {
5584     TCGv t0, t1;
5585 
5586     t0 = tcg_temp_local_new();
5587     t1 = tcg_temp_local_new();
5588 
5589     switch (opc3 & 0x0D) {
5590     case 0x05:
5591         /* macchw    - macchw.    - macchwo   - macchwo.   */
5592         /* macchws   - macchws.   - macchwso  - macchwso.  */
5593         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5594         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5595         /* mulchw - mulchw. */
5596         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5597         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5598         tcg_gen_ext16s_tl(t1, t1);
5599         break;
5600     case 0x04:
5601         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5602         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5603         /* mulchwu - mulchwu. */
5604         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5605         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5606         tcg_gen_ext16u_tl(t1, t1);
5607         break;
5608     case 0x01:
5609         /* machhw    - machhw.    - machhwo   - machhwo.   */
5610         /* machhws   - machhws.   - machhwso  - machhwso.  */
5611         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5612         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5613         /* mulhhw - mulhhw. */
5614         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5615         tcg_gen_ext16s_tl(t0, t0);
5616         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5617         tcg_gen_ext16s_tl(t1, t1);
5618         break;
5619     case 0x00:
5620         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5621         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5622         /* mulhhwu - mulhhwu. */
5623         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5624         tcg_gen_ext16u_tl(t0, t0);
5625         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5626         tcg_gen_ext16u_tl(t1, t1);
5627         break;
5628     case 0x0D:
5629         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5630         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5631         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5632         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5633         /* mullhw - mullhw. */
5634         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5635         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5636         break;
5637     case 0x0C:
5638         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5639         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5640         /* mullhwu - mullhwu. */
5641         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5642         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5643         break;
5644     }
5645     if (opc2 & 0x04) {
5646         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5647         tcg_gen_mul_tl(t1, t0, t1);
5648         if (opc2 & 0x02) {
5649             /* nmultiply-and-accumulate (0x0E) */
5650             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5651         } else {
5652             /* multiply-and-accumulate (0x0C) */
5653             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5654         }
5655 
5656         if (opc3 & 0x12) {
5657             /* Check overflow and/or saturate */
5658             TCGLabel *l1 = gen_new_label();
5659 
5660             if (opc3 & 0x10) {
5661                 /* Start with XER OV disabled, the most likely case */
5662                 tcg_gen_movi_tl(cpu_ov, 0);
5663             }
5664             if (opc3 & 0x01) {
5665                 /* Signed */
5666                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5667                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5668                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5669                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5670                 if (opc3 & 0x02) {
5671                     /* Saturate */
5672                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5673                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5674                 }
5675             } else {
5676                 /* Unsigned */
5677                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5678                 if (opc3 & 0x02) {
5679                     /* Saturate */
5680                     tcg_gen_movi_tl(t0, UINT32_MAX);
5681                 }
5682             }
5683             if (opc3 & 0x10) {
5684                 /* Check overflow */
5685                 tcg_gen_movi_tl(cpu_ov, 1);
5686                 tcg_gen_movi_tl(cpu_so, 1);
5687             }
5688             gen_set_label(l1);
5689             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5690         }
5691     } else {
5692         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5693     }
5694     tcg_temp_free(t0);
5695     tcg_temp_free(t1);
5696     if (unlikely(Rc) != 0) {
5697         /* Update Rc0 */
5698         gen_set_Rc0(ctx, cpu_gpr[rt]);
5699     }
5700 }
5701 
/*
 * GEN_MAC_HANDLER: emit the translator for one 405 MAC-family opcode by
 * delegating to gen_405_mulladd_insn with the (opc2, opc3) selector pair
 * described above that routine.
 */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw    - macchw.    */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo   - macchwo.   */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws   - macchws.   */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso  - macchwso.  */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu  - macchwsu.  */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu   - macchwu.   */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo  - macchwuo.  */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw    - machhw.    */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo   - machhwo.   */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws   - machhws.   */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso  - machhwso.  */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu  - machhwsu.  */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu   - machhwu.   */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo  - machhwuo.  */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw    - maclhw.    */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo   - maclhwo.   */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws   - maclhws.   */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso  - maclhwso.  */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu   - maclhwu.   */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo  - maclhwuo.  */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu  - maclhwsu.  */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw   - nmacchw.   */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo  - nmacchwo.  */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws  - nmacchws.  */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw   - nmachhw.   */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo  - nmachhwo.  */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws  - nmachhws.  */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw   - nmaclhw.   */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo  - nmaclhwo.  */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws  - nmaclhws.  */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw  - mulchw.  */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw  - mulhhw.  */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw  - mullhw.  */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5794 
5795 /* mfdcr */
/* mfdcr: read the device control register named by the SPR field into rD */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv dcrn;

    CHK_SV(ctx);
    /* The DCR number reuses the SPR instruction field encoding */
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}
5809 
5810 /* mtdcr */
/* mtdcr: write rS to the device control register named by the SPR field */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv dcrn;

    CHK_SV(ctx);
    /* The DCR number reuses the SPR instruction field encoding */
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}
5824 
5825 /* mfdcrx */
5826 /* XXX: not implemented on 440 ? */
5827 static void gen_mfdcrx(DisasContext *ctx)
5828 {
5829 #if defined(CONFIG_USER_ONLY)
5830     GEN_PRIV(ctx);
5831 #else
5832     CHK_SV(ctx);
5833     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5834                         cpu_gpr[rA(ctx->opcode)]);
5835     /* Note: Rc update flag set leads to undefined state of Rc0 */
5836 #endif /* defined(CONFIG_USER_ONLY) */
5837 }
5838 
5839 /* mtdcrx */
5840 /* XXX: not implemented on 440 ? */
5841 static void gen_mtdcrx(DisasContext *ctx)
5842 {
5843 #if defined(CONFIG_USER_ONLY)
5844     GEN_PRIV(ctx);
5845 #else
5846     CHK_SV(ctx);
5847     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5848                          cpu_gpr[rS(ctx->opcode)]);
5849     /* Note: Rc update flag set leads to undefined state of Rc0 */
5850 #endif /* defined(CONFIG_USER_ONLY) */
5851 }
5852 
5853 /* dccci */
5854 static void gen_dccci(DisasContext *ctx)
5855 {
5856     CHK_SV(ctx);
5857     /* interpreted as no-op */
5858 }
5859 
5860 /* dcread */
5861 static void gen_dcread(DisasContext *ctx)
5862 {
5863 #if defined(CONFIG_USER_ONLY)
5864     GEN_PRIV(ctx);
5865 #else
5866     TCGv EA, val;
5867 
5868     CHK_SV(ctx);
5869     gen_set_access_type(ctx, ACCESS_CACHE);
5870     EA = tcg_temp_new();
5871     gen_addr_reg_index(ctx, EA);
5872     val = tcg_temp_new();
5873     gen_qemu_ld32u(ctx, val, EA);
5874     tcg_temp_free(val);
5875     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5876     tcg_temp_free(EA);
5877 #endif /* defined(CONFIG_USER_ONLY) */
5878 }
5879 
5880 /* icbt */
5881 static void gen_icbt_40x(DisasContext *ctx)
5882 {
5883     /*
5884      * interpreted as no-op
5885      * XXX: specification say this is treated as a load by the MMU but
5886      *      does not generate any exception
5887      */
5888 }
5889 
5890 /* iccci */
5891 static void gen_iccci(DisasContext *ctx)
5892 {
5893     CHK_SV(ctx);
5894     /* interpreted as no-op */
5895 }
5896 
5897 /* icread */
5898 static void gen_icread(DisasContext *ctx)
5899 {
5900     CHK_SV(ctx);
5901     /* interpreted as no-op */
5902 }
5903 
5904 /* rfci (supervisor only) */
5905 static void gen_rfci_40x(DisasContext *ctx)
5906 {
5907 #if defined(CONFIG_USER_ONLY)
5908     GEN_PRIV(ctx);
5909 #else
5910     CHK_SV(ctx);
5911     /* Restore CPU state */
5912     gen_helper_40x_rfci(cpu_env);
5913     ctx->base.is_jmp = DISAS_EXIT;
5914 #endif /* defined(CONFIG_USER_ONLY) */
5915 }
5916 
/* rfci: return from critical interrupt (BookE variant); ends the TB */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5928 
5929 /* BookE specific */
5930 
5931 /* XXX: not implemented on 440 ? */
5932 static void gen_rfdi(DisasContext *ctx)
5933 {
5934 #if defined(CONFIG_USER_ONLY)
5935     GEN_PRIV(ctx);
5936 #else
5937     CHK_SV(ctx);
5938     /* Restore CPU state */
5939     gen_helper_rfdi(cpu_env);
5940     ctx->base.is_jmp = DISAS_EXIT;
5941 #endif /* defined(CONFIG_USER_ONLY) */
5942 }
5943 
5944 /* XXX: not implemented on 440 ? */
5945 static void gen_rfmci(DisasContext *ctx)
5946 {
5947 #if defined(CONFIG_USER_ONLY)
5948     GEN_PRIV(ctx);
5949 #else
5950     CHK_SV(ctx);
5951     /* Restore CPU state */
5952     gen_helper_rfmci(cpu_env);
5953     ctx->base.is_jmp = DISAS_EXIT;
5954 #endif /* defined(CONFIG_USER_ONLY) */
5955 }
5956 
5957 /* TLB management - PowerPC 405 implementation */
5958 
5959 /* tlbre */
5960 static void gen_tlbre_40x(DisasContext *ctx)
5961 {
5962 #if defined(CONFIG_USER_ONLY)
5963     GEN_PRIV(ctx);
5964 #else
5965     CHK_SV(ctx);
5966     switch (rB(ctx->opcode)) {
5967     case 0:
5968         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5969                                 cpu_gpr[rA(ctx->opcode)]);
5970         break;
5971     case 1:
5972         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5973                                 cpu_gpr[rA(ctx->opcode)]);
5974         break;
5975     default:
5976         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5977         break;
5978     }
5979 #endif /* defined(CONFIG_USER_ONLY) */
5980 }
5981 
5982 /* tlbsx - tlbsx. */
5983 static void gen_tlbsx_40x(DisasContext *ctx)
5984 {
5985 #if defined(CONFIG_USER_ONLY)
5986     GEN_PRIV(ctx);
5987 #else
5988     TCGv t0;
5989 
5990     CHK_SV(ctx);
5991     t0 = tcg_temp_new();
5992     gen_addr_reg_index(ctx, t0);
5993     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5994     tcg_temp_free(t0);
5995     if (Rc(ctx->opcode)) {
5996         TCGLabel *l1 = gen_new_label();
5997         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5998         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5999         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6000         gen_set_label(l1);
6001     }
6002 #endif /* defined(CONFIG_USER_ONLY) */
6003 }
6004 
6005 /* tlbwe */
6006 static void gen_tlbwe_40x(DisasContext *ctx)
6007 {
6008 #if defined(CONFIG_USER_ONLY)
6009     GEN_PRIV(ctx);
6010 #else
6011     CHK_SV(ctx);
6012 
6013     switch (rB(ctx->opcode)) {
6014     case 0:
6015         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6016                                 cpu_gpr[rS(ctx->opcode)]);
6017         break;
6018     case 1:
6019         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6020                                 cpu_gpr[rS(ctx->opcode)]);
6021         break;
6022     default:
6023         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6024         break;
6025     }
6026 #endif /* defined(CONFIG_USER_ONLY) */
6027 }
6028 
6029 /* TLB management - PowerPC 440 implementation */
6030 
6031 /* tlbre */
6032 static void gen_tlbre_440(DisasContext *ctx)
6033 {
6034 #if defined(CONFIG_USER_ONLY)
6035     GEN_PRIV(ctx);
6036 #else
6037     CHK_SV(ctx);
6038 
6039     switch (rB(ctx->opcode)) {
6040     case 0:
6041     case 1:
6042     case 2:
6043         {
6044             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6045             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6046                                  t0, cpu_gpr[rA(ctx->opcode)]);
6047             tcg_temp_free_i32(t0);
6048         }
6049         break;
6050     default:
6051         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6052         break;
6053     }
6054 #endif /* defined(CONFIG_USER_ONLY) */
6055 }
6056 
6057 /* tlbsx - tlbsx. */
6058 static void gen_tlbsx_440(DisasContext *ctx)
6059 {
6060 #if defined(CONFIG_USER_ONLY)
6061     GEN_PRIV(ctx);
6062 #else
6063     TCGv t0;
6064 
6065     CHK_SV(ctx);
6066     t0 = tcg_temp_new();
6067     gen_addr_reg_index(ctx, t0);
6068     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6069     tcg_temp_free(t0);
6070     if (Rc(ctx->opcode)) {
6071         TCGLabel *l1 = gen_new_label();
6072         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6073         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6074         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6075         gen_set_label(l1);
6076     }
6077 #endif /* defined(CONFIG_USER_ONLY) */
6078 }
6079 
6080 /* tlbwe */
6081 static void gen_tlbwe_440(DisasContext *ctx)
6082 {
6083 #if defined(CONFIG_USER_ONLY)
6084     GEN_PRIV(ctx);
6085 #else
6086     CHK_SV(ctx);
6087     switch (rB(ctx->opcode)) {
6088     case 0:
6089     case 1:
6090     case 2:
6091         {
6092             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6093             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6094                                  cpu_gpr[rS(ctx->opcode)]);
6095             tcg_temp_free_i32(t0);
6096         }
6097         break;
6098     default:
6099         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6100         break;
6101     }
6102 #endif /* defined(CONFIG_USER_ONLY) */
6103 }
6104 
6105 /* TLB management - PowerPC BookE 2.06 implementation */
6106 
6107 /* tlbre */
6108 static void gen_tlbre_booke206(DisasContext *ctx)
6109 {
6110  #if defined(CONFIG_USER_ONLY)
6111     GEN_PRIV(ctx);
6112 #else
6113    CHK_SV(ctx);
6114     gen_helper_booke206_tlbre(cpu_env);
6115 #endif /* defined(CONFIG_USER_ONLY) */
6116 }
6117 
6118 /* tlbsx - tlbsx. */
6119 static void gen_tlbsx_booke206(DisasContext *ctx)
6120 {
6121 #if defined(CONFIG_USER_ONLY)
6122     GEN_PRIV(ctx);
6123 #else
6124     TCGv t0;
6125 
6126     CHK_SV(ctx);
6127     if (rA(ctx->opcode)) {
6128         t0 = tcg_temp_new();
6129         tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6130     } else {
6131         t0 = tcg_const_tl(0);
6132     }
6133 
6134     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6135     gen_helper_booke206_tlbsx(cpu_env, t0);
6136     tcg_temp_free(t0);
6137 #endif /* defined(CONFIG_USER_ONLY) */
6138 }
6139 
6140 /* tlbwe */
6141 static void gen_tlbwe_booke206(DisasContext *ctx)
6142 {
6143 #if defined(CONFIG_USER_ONLY)
6144     GEN_PRIV(ctx);
6145 #else
6146     CHK_SV(ctx);
6147     gen_helper_booke206_tlbwe(cpu_env);
6148 #endif /* defined(CONFIG_USER_ONLY) */
6149 }
6150 
/* tlbivax (BookE 2.06): invalidate TLB entries matching EA = (rA|0) + rB */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
6165 
/*
 * tlbilx (BookE 2.06): local TLB invalidate.  Opcode bits 21..22 select
 * the scope (0, 1 or 3 map to distinct helpers); scope 2 is reserved and
 * raises an invalid-instruction exception.
 */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
6195 
6196 /* wrtee */
6197 static void gen_wrtee(DisasContext *ctx)
6198 {
6199 #if defined(CONFIG_USER_ONLY)
6200     GEN_PRIV(ctx);
6201 #else
6202     TCGv t0;
6203 
6204     CHK_SV(ctx);
6205     t0 = tcg_temp_new();
6206     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6207     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6208     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6209     gen_ppc_maybe_interrupt(ctx);
6210     tcg_temp_free(t0);
6211     /*
6212      * Stop translation to have a chance to raise an exception if we
6213      * just set msr_ee to 1
6214      */
6215     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6216 #endif /* defined(CONFIG_USER_ONLY) */
6217 }
6218 
6219 /* wrteei */
6220 static void gen_wrteei(DisasContext *ctx)
6221 {
6222 #if defined(CONFIG_USER_ONLY)
6223     GEN_PRIV(ctx);
6224 #else
6225     CHK_SV(ctx);
6226     if (ctx->opcode & 0x00008000) {
6227         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6228         gen_ppc_maybe_interrupt(ctx);
6229         /* Stop translation to have a chance to raise an exception */
6230         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6231     } else {
6232         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6233     }
6234 #endif /* defined(CONFIG_USER_ONLY) */
6235 }
6236 
6237 /* PowerPC 440 specific instructions */
6238 
6239 /* dlmzb */
6240 static void gen_dlmzb(DisasContext *ctx)
6241 {
6242     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6243     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6244                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6245     tcg_temp_free_i32(t0);
6246 }
6247 
6248 /* mbar replaces eieio on 440 */
6249 static void gen_mbar(DisasContext *ctx)
6250 {
6251     /* interpreted as no-op */
6252 }
6253 
6254 /* msync replaces sync on 440 */
6255 static void gen_msync_4xx(DisasContext *ctx)
6256 {
6257     /* Only e500 seems to treat reserved bits as invalid */
6258     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6259         (ctx->opcode & 0x03FFF801)) {
6260         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6261     }
6262     /* otherwise interpreted as no-op */
6263 }
6264 
6265 /* icbt */
6266 static void gen_icbt_440(DisasContext *ctx)
6267 {
6268     /*
6269      * interpreted as no-op
6270      * XXX: specification say this is treated as a load by the MMU but
6271      *      does not generate any exception
6272      */
6273 }
6274 
6275 #if defined(TARGET_PPC64)
/* maddld: rD = low 64 bits of (rA * rB) + rC */
static void gen_maddld(DisasContext *ctx)
{
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
    tcg_temp_free_i64(t1);
}
6284 
6285 /* maddhd maddhdu */
6286 static void gen_maddhd_maddhdu(DisasContext *ctx)
6287 {
6288     TCGv_i64 lo = tcg_temp_new_i64();
6289     TCGv_i64 hi = tcg_temp_new_i64();
6290     TCGv_i64 t1 = tcg_temp_new_i64();
6291 
6292     if (Rc(ctx->opcode)) {
6293         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6294                           cpu_gpr[rB(ctx->opcode)]);
6295         tcg_gen_movi_i64(t1, 0);
6296     } else {
6297         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6298                           cpu_gpr[rB(ctx->opcode)]);
6299         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6300     }
6301     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6302                      cpu_gpr[rC(ctx->opcode)], t1);
6303     tcg_temp_free_i64(lo);
6304     tcg_temp_free_i64(hi);
6305     tcg_temp_free_i64(t1);
6306 }
6307 #endif /* defined(TARGET_PPC64) */
6308 
/* tbegin: start a transaction; raises a facility-unavailable exception
 * when transactional memory is disabled */
static void gen_tbegin(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    gen_helper_tbegin(cpu_env);
}
6317 
/*
 * GEN_TM_NOOP: user-space TM instructions that only need to report a
 * non-transactional state, because tbegin always fails in QEMU.
 */
#define GEN_TM_NOOP(name)                                      \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails in QEMU, these user         \
     * space instructions all have a simple implementation:    \
     *                                                         \
     *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
     *           = 0b0 || 0b00    || 0b0                       \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

GEN_TM_NOOP(tend);
GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);
GEN_TM_NOOP(tsr);
6342 
/* cp_abort: abort an in-progress copy-paste sequence.  Since copy/paste
 * is not implemented, there is never anything to abort. */
static inline void gen_cp_abort(DisasContext *ctx)
{
    /* Do Nothing */
}
6347 
/* GEN_CP_PASTE_NOOP: copy/paste facility instructions, unimplemented */
#define GEN_CP_PASTE_NOOP(name)                           \
static inline void gen_##name(DisasContext *ctx)          \
{                                                         \
    /*                                                    \
     * Generate invalid exception until we have an        \
     * implementation of the copy paste facility          \
     */                                                   \
    gen_invalid(ctx);                                     \
}

GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)
6360 
/* tcheck: report transaction state into the CR field named by crfD */
static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /*
     * Because tbegin always fails, the tcheck implementation is
     * simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}
6376 
/*
 * GEN_TM_PRIV_NOOP: privileged TM instructions.  In user mode they fault;
 * in system mode they behave like GEN_TM_NOOP after the supervisor check.
 */
#if defined(CONFIG_USER_ONLY)
#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    gen_priv_opc(ctx);                                         \
}

#else

#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    CHK_SV(ctx);                                               \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails, the implementation is      \
     * simple:                                                 \
     *                                                         \
     *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
     *         = 0b0 || 0b00 | 0b0                             \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

#endif

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);
6408 
/* Load FPR "regno" (doubleword 0 of the backing VSR) into dst */
static inline void get_fpr(TCGv_i64 dst, int regno)
{
    tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
}
6413 
/* Store src into FPR "regno" and zero doubleword 1 of the backing VSR */
static inline void set_fpr(int regno, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
    /*
     * Before PowerISA v3.1 the result of doubleword 1 of the VSR
     * corresponding to the target FPR was undefined. However,
     * most (if not all) real hardware were setting the result to 0.
     * Starting at ISA v3.1, the result for doubleword 1 is now defined
     * to be 0.
     */
    tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
}
6426 
/* Load one 64-bit half ('high' selects which) of AltiVec register 'regno'. */
static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
}
6431 
/* Store 'src' into one 64-bit half ('high' selects which) of AltiVec register 'regno'. */
static inline void set_avr64(int regno, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
}
6436 
6437 /*
6438  * Helpers for decodetree used by !function for decoding arguments.
6439  */
6440 static int times_2(DisasContext *ctx, int x)
6441 {
6442     return x * 2;
6443 }
6444 
6445 static int times_4(DisasContext *ctx, int x)
6446 {
6447     return x * 4;
6448 }
6449 
6450 static int times_16(DisasContext *ctx, int x)
6451 {
6452     return x * 16;
6453 }
6454 
6455 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6456 {
6457     return deposit64(0xfffffffffffffe00, 3, 6, x);
6458 }
6459 
6460 /*
6461  * Helpers for trans_* functions to check for specific insns flags.
6462  * Use token pasting to ensure that we use the proper flag with the
6463  * proper variable.
6464  */
/* Reject the insn (return false from trans_*) unless insns_flags has PPC_<NAME>. */
#define REQUIRE_INSNS_FLAGS(CTX, NAME) \
    do {                                                \
        if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
            return false;                               \
        }                                               \
    } while (0)

/* Reject the insn (return false from trans_*) unless insns_flags2 has PPC2_<NAME>. */
#define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
    do {                                                \
        if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
            return false;                               \
        }                                               \
    } while (0)
6478 
/* Then special-case the check for 64-bit so that we elide code for ppc32. */
#if TARGET_LONG_BITS == 32
/* 32-bit target: a 64-bit-only insn can never decode, so fail outright. */
# define REQUIRE_64BIT(CTX)  return false
#else
# define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
#endif
6485 
/*
 * If AltiVec is not enabled in the current context, raise a vector
 * unavailable exception and complete the insn (return true).
 */
#define REQUIRE_VECTOR(CTX)                             \
    do {                                                \
        if (unlikely(!(CTX)->altivec_enabled)) {        \
            gen_exception((CTX), POWERPC_EXCP_VPU);     \
            return true;                                \
        }                                               \
    } while (0)

/* As REQUIRE_VECTOR, but for VSX (raises POWERPC_EXCP_VSXU when disabled). */
#define REQUIRE_VSX(CTX)                                \
    do {                                                \
        if (unlikely(!(CTX)->vsx_enabled)) {            \
            gen_exception((CTX), POWERPC_EXCP_VSXU);    \
            return true;                                \
        }                                               \
    } while (0)

/* As REQUIRE_VECTOR, but for the FPU (raises POWERPC_EXCP_FPU when disabled). */
#define REQUIRE_FPU(ctx)                                \
    do {                                                \
        if (unlikely(!(ctx)->fpu_enabled)) {            \
            gen_exception((ctx), POWERPC_EXCP_FPU);     \
            return true;                                \
        }                                               \
    } while (0)
6509 
#if !defined(CONFIG_USER_ONLY)
/*
 * Privileged insn: if translating in problem (user) state, raise a
 * privileged-op exception and complete the insn (return true).
 */
#define REQUIRE_SV(CTX)             \
    do {                            \
        if (unlikely((CTX)->pr)) {  \
            gen_priv_opc(CTX);      \
            return true;            \
        }                           \
    } while (0)

/* Hypervisor-privileged insn: also trap when not in hypervisor state. */
#define REQUIRE_HV(CTX)                             \
    do {                                            \
        if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
            gen_priv_opc(CTX);                      \
            return true;                            \
        }                                           \
    } while (0)
#else
/* User-mode emulation: privileged insns always trap. */
#define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#endif
6530 
6531 /*
6532  * Helpers for implementing sets of trans_* functions.
6533  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6534  */
/* Plain forwarder: trans_NAME() simply calls FUNC(ctx, a, ...). */
#define TRANS(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { return FUNC(ctx, a, __VA_ARGS__); }
/* As TRANS, but first require insns_flags bit PPC_<FLAGS>. */
#define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }
/* As TRANS, but first require insns_flags2 bit PPC2_<FLAGS2>. */
#define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }

/* As TRANS, but restricted to 64-bit CPUs. */
#define TRANS64(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
/* As TRANS64, additionally requiring insns_flags2 bit PPC2_<FLAGS2>. */
#define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_64BIT(ctx);                                    \
        REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }
6561 
6562 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6563 
6564 
6565 #include "decode-insn32.c.inc"
6566 #include "decode-insn64.c.inc"
6567 #include "power8-pmu-regs.c.inc"
6568 
6569 /*
6570  * Incorporate CIA into the constant when R=1.
6571  * Validate that when R=1, RA=0.
6572  */
6573 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6574 {
6575     d->rt = a->rt;
6576     d->ra = a->ra;
6577     d->si = a->si;
6578     if (a->r) {
6579         if (unlikely(a->ra != 0)) {
6580             gen_invalid(ctx);
6581             return false;
6582         }
6583         d->si += ctx->cia;
6584     }
6585     return true;
6586 }
6587 
6588 #include "translate/fixedpoint-impl.c.inc"
6589 
6590 #include "translate/fp-impl.c.inc"
6591 
6592 #include "translate/vmx-impl.c.inc"
6593 
6594 #include "translate/vsx-impl.c.inc"
6595 
6596 #include "translate/dfp-impl.c.inc"
6597 
6598 #include "translate/spe-impl.c.inc"
6599 
6600 #include "translate/branch-impl.c.inc"
6601 
6602 #include "translate/processor-ctrl-impl.c.inc"
6603 
6604 #include "translate/storage-ctrl-impl.c.inc"
6605 
6606 /* Handles lfdp */
6607 static void gen_dform39(DisasContext *ctx)
6608 {
6609     if ((ctx->opcode & 0x3) == 0) {
6610         if (ctx->insns_flags2 & PPC2_ISA205) {
6611             return gen_lfdp(ctx);
6612         }
6613     }
6614     return gen_invalid(ctx);
6615 }
6616 
6617 /* Handles stfdp */
6618 static void gen_dform3D(DisasContext *ctx)
6619 {
6620     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6621         /* stfdp */
6622         if (ctx->insns_flags2 & PPC2_ISA205) {
6623             return gen_stfdp(ctx);
6624         }
6625     }
6626     return gen_invalid(ctx);
6627 }
6628 
6629 #if defined(TARGET_PPC64)
/* brd: byte-reverse the doubleword in rS into rA */
static void gen_brd(DisasContext *ctx)
{
    tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
6635 
6636 /* brw */
6637 static void gen_brw(DisasContext *ctx)
6638 {
6639     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6640     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6641 
6642 }
6643 
/* brh: byte-reverse each halfword of rS into rA */
static void gen_brh(DisasContext *ctx)
{
    /*
     * Swap the two bytes within every 16-bit lane: isolate the odd and
     * even bytes with a mask, shift each set into the other byte's
     * position, then merge the two halves.
     */
    TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    /* t2 = high byte of each halfword, moved to the low-byte position */
    tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
    tcg_gen_and_i64(t2, t1, mask);
    /* t1 = low byte of each halfword, moved to the high-byte position */
    tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_shli_i64(t1, t1, 8);
    tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
6660 #endif
6661 
6662 static opcode_t opcodes[] = {
6663 #if defined(TARGET_PPC64)
6664 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6665 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6666 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6667 #endif
6668 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6669 #if defined(TARGET_PPC64)
6670 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6671 #endif
6672 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6673 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6674 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6675 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6676 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6677 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6678 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6679 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6680 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6681 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6682 #if defined(TARGET_PPC64)
6683 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6684 #endif
6685 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6686 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6687 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6688 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6689 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6690 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6691 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6692 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6693 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6694 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6695 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6696 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6697 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6698 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6699 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6700 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6701 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6702 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6703 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6704 #if defined(TARGET_PPC64)
6705 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6706 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6707 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6708 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6709 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6710 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6711 #endif
6712 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6713 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6714 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6715 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6716 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6717 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6718 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6719 #if defined(TARGET_PPC64)
6720 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6721 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6722 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6723 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6724 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6725 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6726                PPC_NONE, PPC2_ISA300),
6727 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6728                PPC_NONE, PPC2_ISA300),
6729 #endif
6730 /* handles lfdp, lxsd, lxssp */
6731 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6732 /* handles stfdp, stxsd, stxssp */
6733 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6734 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6735 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6736 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6737 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6738 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6739 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6740 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6741 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6742 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6743 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6744 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6745 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6746 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6747 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6748 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6749 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6750 #if defined(TARGET_PPC64)
6751 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6752 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6753 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6754 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6755 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6756 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6757 #endif
6758 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6759 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6760 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6761 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6762 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6763 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6764 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6765 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6766 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6767 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6768 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6769 #if defined(TARGET_PPC64)
6770 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6771 #if !defined(CONFIG_USER_ONLY)
6772 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6773 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6774 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6775 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6776 #endif
6777 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6778 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6779 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6780 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6781 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6782 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6783 #endif
6784 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6785 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6786 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6787 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6788 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6789 #if defined(TARGET_PPC64)
6790 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6791 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6792 #endif
6793 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6794 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6795 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6796 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6797 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6798 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6799 #if defined(TARGET_PPC64)
6800 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6801 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6802 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6803 #endif
6804 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6805 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6806 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6807 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6808 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6809 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6810 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6811 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6812 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6813 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6814 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6815 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6816 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6817 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6818 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6819 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6820 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6821 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6822 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6823 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6824 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6825 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6826 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6827 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6828 #if defined(TARGET_PPC64)
6829 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6830 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6831              PPC_SEGMENT_64B),
6832 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6833 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6834              PPC_SEGMENT_64B),
6835 #endif
6836 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6837 /*
6838  * XXX Those instructions will need to be handled differently for
6839  * different ISA versions
6840  */
6841 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6842 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6843 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6844 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6845 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6846 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6847 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6848 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6849 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6850 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6851 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6852 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6853 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6854 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6855 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6856 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6857 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6858 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6859 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6860 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6861 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6862 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6863 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6864 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6865 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6866 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6867 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6868                PPC_NONE, PPC2_BOOKE206),
6869 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6870                PPC_NONE, PPC2_BOOKE206),
6871 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6872                PPC_NONE, PPC2_BOOKE206),
6873 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6874                PPC_NONE, PPC2_BOOKE206),
6875 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6876                PPC_NONE, PPC2_BOOKE206),
6877 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6878 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6879 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6880 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6881               PPC_BOOKE, PPC2_BOOKE206),
6882 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6883 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6884                PPC_BOOKE, PPC2_BOOKE206),
6885 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6886              PPC_440_SPEC),
6887 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6888 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6889 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6890 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6891 #if defined(TARGET_PPC64)
6892 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6893               PPC2_ISA300),
6894 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6895 #endif
6896 
6897 #undef GEN_INT_ARITH_ADD
6898 #undef GEN_INT_ARITH_ADD_CONST
6899 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6900 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6901 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6902                                 add_ca, compute_ca, compute_ov)               \
6903 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6904 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6905 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6906 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6907 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6908 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6909 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6910 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6911 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6912 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6913 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6914 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6915 
6916 #undef GEN_INT_ARITH_DIVW
6917 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6918 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6919 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6920 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6921 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6922 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6923 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6924 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6925 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6926 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6927 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6928 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6929 
6930 #if defined(TARGET_PPC64)
6931 #undef GEN_INT_ARITH_DIVD
6932 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6933 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6934 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6935 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6936 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6937 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6938 
6939 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6940 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6941 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6942 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6943 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6944 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6945 
6946 #undef GEN_INT_ARITH_MUL_HELPER
6947 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6948 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6949 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6950 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6951 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6952 #endif
6953 
6954 #undef GEN_INT_ARITH_SUBF
6955 #undef GEN_INT_ARITH_SUBF_CONST
6956 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6957 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6958 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6959                                 add_ca, compute_ca, compute_ov)               \
6960 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6961 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6962 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6963 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6964 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6965 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6966 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6967 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6968 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6969 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6970 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6971 
6972 #undef GEN_LOGICAL1
6973 #undef GEN_LOGICAL2
6974 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6975 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6976 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6977 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6978 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6979 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6980 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6981 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6982 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6983 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6984 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6985 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6986 #if defined(TARGET_PPC64)
6987 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6988 #endif
6989 
6990 #if defined(TARGET_PPC64)
6991 #undef GEN_PPC64_R2
6992 #undef GEN_PPC64_R4
6993 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
6994 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6995 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6996              PPC_64B)
6997 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
6998 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6999 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
7000              PPC_64B),                                                        \
7001 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7002              PPC_64B),                                                        \
7003 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
7004              PPC_64B)
7005 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7006 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7007 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7008 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7009 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7010 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7011 #endif
7012 
7013 #undef GEN_LDX_E
7014 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
7015 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7016 
7017 #if defined(TARGET_PPC64)
7018 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7019 
7020 /* HV/P7 and later only */
7021 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7022 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7023 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7024 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7025 #endif
7026 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7027 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7028 
7029 /* External PID based load */
7030 #undef GEN_LDEPX
7031 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7032 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7033               0x00000001, PPC_NONE, PPC2_BOOKE206),
7034 
7035 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7036 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7037 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7038 #if defined(TARGET_PPC64)
7039 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7040 #endif
7041 
7042 #undef GEN_STX_E
7043 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7044 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7045 
7046 #if defined(TARGET_PPC64)
7047 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7048 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7049 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7050 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7051 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7052 #endif
7053 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7054 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7055 
7056 #undef GEN_STEPX
7057 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7058 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7059               0x00000001, PPC_NONE, PPC2_BOOKE206),
7060 
7061 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7062 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7063 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7064 #if defined(TARGET_PPC64)
7065 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7066 #endif
7067 
7068 #undef GEN_CRLOGIC
7069 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7070 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7071 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7072 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7073 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7074 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7075 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7076 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7077 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7078 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7079 
7080 #undef GEN_MAC_HANDLER
7081 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7082 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7083 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7084 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7085 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7086 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7087 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7088 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7089 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7090 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7091 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7092 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7093 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7094 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7095 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7096 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7097 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7098 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7099 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7100 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7101 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7102 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7103 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7104 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7105 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7106 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7107 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7108 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7109 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7110 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7111 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7112 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7113 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7114 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7115 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7116 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7117 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7118 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7119 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7120 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7121 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7122 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7123 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7124 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7125 
7126 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7127                PPC_NONE, PPC2_TM),
7128 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7129                PPC_NONE, PPC2_TM),
7130 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7131                PPC_NONE, PPC2_TM),
7132 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7133                PPC_NONE, PPC2_TM),
7134 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7135                PPC_NONE, PPC2_TM),
7136 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7137                PPC_NONE, PPC2_TM),
7138 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7139                PPC_NONE, PPC2_TM),
7140 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7141                PPC_NONE, PPC2_TM),
7142 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7143                PPC_NONE, PPC2_TM),
7144 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7145                PPC_NONE, PPC2_TM),
7146 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7147                PPC_NONE, PPC2_TM),
7148 
7149 #include "translate/fp-ops.c.inc"
7150 
7151 #include "translate/vmx-ops.c.inc"
7152 
7153 #include "translate/vsx-ops.c.inc"
7154 
7155 #include "translate/spe-ops.c.inc"
7156 };
7157 
7158 /*****************************************************************************/
7159 /* Opcode types */
7160 enum {
7161     PPC_DIRECT   = 0, /* Opcode routine        */
7162     PPC_INDIRECT = 1, /* Indirect opcode table */
7163 };
7164 
7165 #define PPC_OPCODE_MASK 0x3
7166 
7167 static inline int is_indirect_opcode(void *handler)
7168 {
7169     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7170 }
7171 
7172 static inline opc_handler_t **ind_table(void *handler)
7173 {
7174     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7175 }
7176 
7177 /* Instruction table creation */
7178 /* Opcodes tables creation */
7179 static void fill_new_table(opc_handler_t **table, int len)
7180 {
7181     int i;
7182 
7183     for (i = 0; i < len; i++) {
7184         table[i] = &invalid_handler;
7185     }
7186 }
7187 
7188 static int create_new_table(opc_handler_t **table, unsigned char idx)
7189 {
7190     opc_handler_t **tmp;
7191 
7192     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7193     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7194     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7195 
7196     return 0;
7197 }
7198 
7199 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7200                             opc_handler_t *handler)
7201 {
7202     if (table[idx] != &invalid_handler) {
7203         return -1;
7204     }
7205     table[idx] = handler;
7206 
7207     return 0;
7208 }
7209 
7210 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7211                                 unsigned char idx, opc_handler_t *handler)
7212 {
7213     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7214         printf("*** ERROR: opcode %02x already assigned in main "
7215                "opcode table\n", idx);
7216         return -1;
7217     }
7218 
7219     return 0;
7220 }
7221 
7222 static int register_ind_in_table(opc_handler_t **table,
7223                                  unsigned char idx1, unsigned char idx2,
7224                                  opc_handler_t *handler)
7225 {
7226     if (table[idx1] == &invalid_handler) {
7227         if (create_new_table(table, idx1) < 0) {
7228             printf("*** ERROR: unable to create indirect table "
7229                    "idx=%02x\n", idx1);
7230             return -1;
7231         }
7232     } else {
7233         if (!is_indirect_opcode(table[idx1])) {
7234             printf("*** ERROR: idx %02x already assigned to a direct "
7235                    "opcode\n", idx1);
7236             return -1;
7237         }
7238     }
7239     if (handler != NULL &&
7240         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7241         printf("*** ERROR: opcode %02x already assigned in "
7242                "opcode table %02x\n", idx2, idx1);
7243         return -1;
7244     }
7245 
7246     return 0;
7247 }
7248 
7249 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7250                              unsigned char idx1, unsigned char idx2,
7251                              opc_handler_t *handler)
7252 {
7253     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7254 }
7255 
7256 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7257                                 unsigned char idx1, unsigned char idx2,
7258                                 unsigned char idx3, opc_handler_t *handler)
7259 {
7260     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7261         printf("*** ERROR: unable to join indirect table idx "
7262                "[%02x-%02x]\n", idx1, idx2);
7263         return -1;
7264     }
7265     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7266                               handler) < 0) {
7267         printf("*** ERROR: unable to insert opcode "
7268                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7269         return -1;
7270     }
7271 
7272     return 0;
7273 }
7274 
7275 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7276                                  unsigned char idx1, unsigned char idx2,
7277                                  unsigned char idx3, unsigned char idx4,
7278                                  opc_handler_t *handler)
7279 {
7280     opc_handler_t **table;
7281 
7282     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7283         printf("*** ERROR: unable to join indirect table idx "
7284                "[%02x-%02x]\n", idx1, idx2);
7285         return -1;
7286     }
7287     table = ind_table(ppc_opcodes[idx1]);
7288     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7289         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7290                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7291         return -1;
7292     }
7293     table = ind_table(table[idx2]);
7294     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7295         printf("*** ERROR: unable to insert opcode "
7296                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7297         return -1;
7298     }
7299     return 0;
7300 }
7301 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7302 {
7303     if (insn->opc2 != 0xFF) {
7304         if (insn->opc3 != 0xFF) {
7305             if (insn->opc4 != 0xFF) {
7306                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7307                                           insn->opc3, insn->opc4,
7308                                           &insn->handler) < 0) {
7309                     return -1;
7310                 }
7311             } else {
7312                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7313                                          insn->opc3, &insn->handler) < 0) {
7314                     return -1;
7315                 }
7316             }
7317         } else {
7318             if (register_ind_insn(ppc_opcodes, insn->opc1,
7319                                   insn->opc2, &insn->handler) < 0) {
7320                 return -1;
7321             }
7322         }
7323     } else {
7324         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7325             return -1;
7326         }
7327     }
7328 
7329     return 0;
7330 }
7331 
7332 static int test_opcode_table(opc_handler_t **table, int len)
7333 {
7334     int i, count, tmp;
7335 
7336     for (i = 0, count = 0; i < len; i++) {
7337         /* Consistency fixup */
7338         if (table[i] == NULL) {
7339             table[i] = &invalid_handler;
7340         }
7341         if (table[i] != &invalid_handler) {
7342             if (is_indirect_opcode(table[i])) {
7343                 tmp = test_opcode_table(ind_table(table[i]),
7344                     PPC_CPU_INDIRECT_OPCODES_LEN);
7345                 if (tmp == 0) {
7346                     free(table[i]);
7347                     table[i] = &invalid_handler;
7348                 } else {
7349                     count++;
7350                 }
7351             } else {
7352                 count++;
7353             }
7354         }
7355     }
7356 
7357     return count;
7358 }
7359 
7360 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7361 {
7362     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7363         printf("*** WARNING: no opcode defined !\n");
7364     }
7365 }
7366 
7367 /*****************************************************************************/
7368 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7369 {
7370     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7371     opcode_t *opc;
7372 
7373     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7374     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7375         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7376             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7377             if (register_insn(cpu->opcodes, opc) < 0) {
7378                 error_setg(errp, "ERROR initializing PowerPC instruction "
7379                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7380                            opc->opc3);
7381                 return;
7382             }
7383         }
7384     }
7385     fix_opcode_tables(cpu->opcodes);
7386     fflush(stdout);
7387     fflush(stderr);
7388 }
7389 
7390 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7391 {
7392     opc_handler_t **table, **table_2;
7393     int i, j, k;
7394 
7395     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7396         if (cpu->opcodes[i] == &invalid_handler) {
7397             continue;
7398         }
7399         if (is_indirect_opcode(cpu->opcodes[i])) {
7400             table = ind_table(cpu->opcodes[i]);
7401             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7402                 if (table[j] == &invalid_handler) {
7403                     continue;
7404                 }
7405                 if (is_indirect_opcode(table[j])) {
7406                     table_2 = ind_table(table[j]);
7407                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7408                         if (table_2[k] != &invalid_handler &&
7409                             is_indirect_opcode(table_2[k])) {
7410                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7411                                                      ~PPC_INDIRECT));
7412                         }
7413                     }
7414                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7415                                              ~PPC_INDIRECT));
7416                 }
7417             }
7418             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7419                 ~PPC_INDIRECT));
7420         }
7421     }
7422 }
7423 
7424 int ppc_fixup_cpu(PowerPCCPU *cpu)
7425 {
7426     CPUPPCState *env = &cpu->env;
7427 
7428     /*
7429      * TCG doesn't (yet) emulate some groups of instructions that are
7430      * implemented on some otherwise supported CPUs (e.g. VSX and
7431      * decimal floating point instructions on POWER7).  We remove
7432      * unsupported instruction groups from the cpu state's instruction
7433      * masks and hope the guest can cope.  For at least the pseries
7434      * machine, the unavailability of these instructions can be
7435      * advertised to the guest via the device tree.
7436      */
7437     if ((env->insns_flags & ~PPC_TCG_INSNS)
7438         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7439         warn_report("Disabling some instructions which are not "
7440                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7441                     env->insns_flags & ~PPC_TCG_INSNS,
7442                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7443     }
7444     env->insns_flags &= PPC_TCG_INSNS;
7445     env->insns_flags2 &= PPC_TCG_INSNS2;
7446     return 0;
7447 }
7448 
7449 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7450 {
7451     opc_handler_t **table, *handler;
7452     uint32_t inval;
7453 
7454     ctx->opcode = insn;
7455 
7456     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7457               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7458               ctx->le_mode ? "little" : "big");
7459 
7460     table = cpu->opcodes;
7461     handler = table[opc1(insn)];
7462     if (is_indirect_opcode(handler)) {
7463         table = ind_table(handler);
7464         handler = table[opc2(insn)];
7465         if (is_indirect_opcode(handler)) {
7466             table = ind_table(handler);
7467             handler = table[opc3(insn)];
7468             if (is_indirect_opcode(handler)) {
7469                 table = ind_table(handler);
7470                 handler = table[opc4(insn)];
7471             }
7472         }
7473     }
7474 
7475     /* Is opcode *REALLY* valid ? */
7476     if (unlikely(handler->handler == &gen_invalid)) {
7477         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7478                       "%02x - %02x - %02x - %02x (%08x) "
7479                       TARGET_FMT_lx "\n",
7480                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7481                       insn, ctx->cia);
7482         return false;
7483     }
7484 
7485     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7486                  && Rc(insn))) {
7487         inval = handler->inval2;
7488     } else {
7489         inval = handler->inval1;
7490     }
7491 
7492     if (unlikely((insn & inval) != 0)) {
7493         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7494                       "%02x - %02x - %02x - %02x (%08x) "
7495                       TARGET_FMT_lx "\n", insn & inval,
7496                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7497                       insn, ctx->cia);
7498         return false;
7499     }
7500 
7501     handler->handler(ctx);
7502     return true;
7503 }
7504 
7505 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7506 {
7507     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7508     CPUPPCState *env = cs->env_ptr;
7509     uint32_t hflags = ctx->base.tb->flags;
7510 
7511     ctx->spr_cb = env->spr_cb;
7512     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7513     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7514     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7515     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7516     ctx->insns_flags = env->insns_flags;
7517     ctx->insns_flags2 = env->insns_flags2;
7518     ctx->access_type = -1;
7519     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7520     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7521     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7522     ctx->flags = env->flags;
7523 #if defined(TARGET_PPC64)
7524     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7525     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7526 #endif
7527     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7528         || env->mmu_model & POWERPC_MMU_64;
7529 
7530     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7531     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7532     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7533     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7534     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7535     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7536     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7537     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7538     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7539     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7540     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7541     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7542 
7543     ctx->singlestep_enabled = 0;
7544     if ((hflags >> HFLAGS_SE) & 1) {
7545         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7546         ctx->base.max_insns = 1;
7547     }
7548     if ((hflags >> HFLAGS_BE) & 1) {
7549         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7550     }
7551 }
7552 
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup is needed for PowerPC. */
}
7556 
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    /* Record the guest PC of the instruction about to be translated. */
    tcg_gen_insn_start(dcbase->pc_next);
}
7561 
/*
 * Return true if @insn is the first word of a 64-bit prefixed instruction
 * (primary opcode 1, ISA v3.1).
 * NOTE(review): REQUIRE_INSNS_FLAGS2 is a macro that can return from this
 * function early when the CPU lacks ISA310 support — its expansion is not
 * visible here; confirm it yields the intended result for pre-3.1 CPUs.
 */
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}
7567 
/*
 * Fetch and translate one instruction (one word, or two words for a
 * prefixed instruction): try the decodetree decoders first, falling back
 * to the legacy opcode tables, and emit gen_invalid() if nothing matched.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* Remember the current instruction address (CIA) before advancing. */
    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        /* Plain 32-bit insn: decodetree first, then the legacy tables. */
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        /* Fetch the suffix word and decode the combined 64-bit insn. */
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}
7613 
7614 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7615 {
7616     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7617     DisasJumpType is_jmp = ctx->base.is_jmp;
7618     target_ulong nip = ctx->base.pc_next;
7619 
7620     if (is_jmp == DISAS_NORETURN) {
7621         /* We have already exited the TB. */
7622         return;
7623     }
7624 
7625     /* Honor single stepping. */
7626     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7627         && (nip <= 0x100 || nip > 0xf00)) {
7628         switch (is_jmp) {
7629         case DISAS_TOO_MANY:
7630         case DISAS_EXIT_UPDATE:
7631         case DISAS_CHAIN_UPDATE:
7632             gen_update_nip(ctx, nip);
7633             break;
7634         case DISAS_EXIT:
7635         case DISAS_CHAIN:
7636             break;
7637         default:
7638             g_assert_not_reached();
7639         }
7640 
7641         gen_debug_exception(ctx);
7642         return;
7643     }
7644 
7645     switch (is_jmp) {
7646     case DISAS_TOO_MANY:
7647         if (use_goto_tb(ctx, nip)) {
7648             pmu_count_insns(ctx);
7649             tcg_gen_goto_tb(0);
7650             gen_update_nip(ctx, nip);
7651             tcg_gen_exit_tb(ctx->base.tb, 0);
7652             break;
7653         }
7654         /* fall through */
7655     case DISAS_CHAIN_UPDATE:
7656         gen_update_nip(ctx, nip);
7657         /* fall through */
7658     case DISAS_CHAIN:
7659         /*
7660          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7661          * CF_NO_GOTO_PTR is set. Count insns now.
7662          */
7663         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
7664             pmu_count_insns(ctx);
7665         }
7666 
7667         tcg_gen_lookup_and_goto_ptr();
7668         break;
7669 
7670     case DISAS_EXIT_UPDATE:
7671         gen_update_nip(ctx, nip);
7672         /* fall through */
7673     case DISAS_EXIT:
7674         pmu_count_insns(ctx);
7675         tcg_gen_exit_tb(NULL, 0);
7676         break;
7677 
7678     default:
7679         g_assert_not_reached();
7680     }
7681 }
7682 
7683 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7684                              CPUState *cs, FILE *logfile)
7685 {
7686     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7687     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7688 }
7689 
/* Callbacks wired into the generic translator_loop() for PowerPC. */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};
7698 
/*
 * Entry point for TB translation: run the generic translator loop with
 * the PowerPC callbacks over the code starting at @pc.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext ctx;

    translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
}
7706