xref: /qemu/target/ppc/translate.c (revision 6b40847a)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
44 #define HELPER_H "helper.h"
45 #include "exec/helper-info.c.inc"
46 #undef  HELPER_H
47 
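/* Single-stepping state bits kept in DisasContext::singlestep_enabled */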
48 #define CPU_SINGLE_STEP 0x1
49 #define CPU_BRANCH_STEP 0x2
50 
51 /* Include definitions for instruction classes and implementation flags */
52 /* #define PPC_DEBUG_DISAS */
53 
54 #ifdef PPC_DEBUG_DISAS
55 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
56 #else
57 #  define LOG_DISAS(...) do { } while (0)
58 #endif
59 /*****************************************************************************/
60 /* Code translation helpers                                                  */
61 
62 /* global register indexes */
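/*
 * cpu_reg_names is sized for the NUL-terminated names built below:
 * "r0".."r9" take 3 bytes and "r10".."r31" take 4; the SPE high halves
 * "r0H".."r31H" take 4 or 5; "crf0".."crf7" take 5 bytes each.
 */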
63 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
64                           + 10 * 4 + 22 * 5 /* SPE GPRh */
65                           + 8 * 5           /* CRF */];
66 static TCGv cpu_gpr[32];
67 static TCGv cpu_gprh[32];
68 static TCGv_i32 cpu_crf[8];
69 static TCGv cpu_nip;
70 static TCGv cpu_msr;
71 static TCGv cpu_ctr;
72 static TCGv cpu_lr;
73 #if defined(TARGET_PPC64)
74 static TCGv cpu_cfar;
75 #endif
76 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
77 static TCGv cpu_reserve;
78 static TCGv cpu_reserve_val;
79 static TCGv cpu_reserve_val2;
80 static TCGv cpu_fpscr;
81 static TCGv_i32 cpu_access_type;
82 
83 void ppc_translate_init(void)
84 {
85     int i;
86     char *p;
87     size_t cpu_reg_names_size;
88 
89     p = cpu_reg_names;
90     cpu_reg_names_size = sizeof(cpu_reg_names);
91 
92     for (i = 0; i < 8; i++) {
93         snprintf(p, cpu_reg_names_size, "crf%d", i);
94         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
95                                             offsetof(CPUPPCState, crf[i]), p);
96         p += 5;
97         cpu_reg_names_size -= 5;
98     }
99 
100     for (i = 0; i < 32; i++) {
101         snprintf(p, cpu_reg_names_size, "r%d", i);
102         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
103                                         offsetof(CPUPPCState, gpr[i]), p);
104         p += (i < 10) ? 3 : 4;
105         cpu_reg_names_size -= (i < 10) ? 3 : 4;
106         snprintf(p, cpu_reg_names_size, "r%dH", i);
107         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
108                                          offsetof(CPUPPCState, gprh[i]), p);
109         p += (i < 10) ? 4 : 5;
110         cpu_reg_names_size -= (i < 10) ? 4 : 5;
111     }
112 
113     cpu_nip = tcg_global_mem_new(cpu_env,
114                                  offsetof(CPUPPCState, nip), "nip");
115 
116     cpu_msr = tcg_global_mem_new(cpu_env,
117                                  offsetof(CPUPPCState, msr), "msr");
118 
119     cpu_ctr = tcg_global_mem_new(cpu_env,
120                                  offsetof(CPUPPCState, ctr), "ctr");
121 
122     cpu_lr = tcg_global_mem_new(cpu_env,
123                                 offsetof(CPUPPCState, lr), "lr");
124 
125 #if defined(TARGET_PPC64)
126     cpu_cfar = tcg_global_mem_new(cpu_env,
127                                   offsetof(CPUPPCState, cfar), "cfar");
128 #endif
129 
130     cpu_xer = tcg_global_mem_new(cpu_env,
131                                  offsetof(CPUPPCState, xer), "xer");
132     cpu_so = tcg_global_mem_new(cpu_env,
133                                 offsetof(CPUPPCState, so), "SO");
134     cpu_ov = tcg_global_mem_new(cpu_env,
135                                 offsetof(CPUPPCState, ov), "OV");
136     cpu_ca = tcg_global_mem_new(cpu_env,
137                                 offsetof(CPUPPCState, ca), "CA");
138     cpu_ov32 = tcg_global_mem_new(cpu_env,
139                                   offsetof(CPUPPCState, ov32), "OV32");
140     cpu_ca32 = tcg_global_mem_new(cpu_env,
141                                   offsetof(CPUPPCState, ca32), "CA32");
142 
143     cpu_reserve = tcg_global_mem_new(cpu_env,
144                                      offsetof(CPUPPCState, reserve_addr),
145                                      "reserve_addr");
146     cpu_reserve_val = tcg_global_mem_new(cpu_env,
147                                          offsetof(CPUPPCState, reserve_val),
148                                          "reserve_val");
149     cpu_reserve_val2 = tcg_global_mem_new(cpu_env,
150                                           offsetof(CPUPPCState, reserve_val2),
151                                           "reserve_val2");
152 
153     cpu_fpscr = tcg_global_mem_new(cpu_env,
154                                    offsetof(CPUPPCState, fpscr), "fpscr");
155 
156     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
157                                              offsetof(CPUPPCState, access_type),
158                                              "access_type");
159 }
160 
161 /* internal defines */
162 struct DisasContext {
163     DisasContextBase base;
164     target_ulong cia;  /* current instruction address */
165     uint32_t opcode;
166     /* Routine used to access memory */
167     bool pr, hv, dr, le_mode;
168     bool lazy_tlb_flush;
169     bool need_access_type;
170     int mem_idx;
171     int access_type;
172     /* Translation flags */
173     MemOp default_tcg_memop_mask;
174 #if defined(TARGET_PPC64)
175     bool sf_mode;
176     bool has_cfar;
177 #endif
178     bool fpu_enabled;
179     bool altivec_enabled;
180     bool vsx_enabled;
181     bool spe_enabled;
182     bool tm_enabled;
183     bool gtse;
184     bool hr;
185     bool mmcr0_pmcc0;
186     bool mmcr0_pmcc1;
187     bool mmcr0_pmcjce;
188     bool pmc_other;
189     bool pmu_insn_cnt;
190     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
191     int singlestep_enabled;
192     uint32_t flags;
193     uint64_t insns_flags;
194     uint64_t insns_flags2;
195 };
196 
197 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
198 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
199 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
200 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
201 
202 /* Return true iff byteswap is needed in a scalar memop */
203 static inline bool need_byteswap(const DisasContext *ctx)
204 {
205 #if TARGET_BIG_ENDIAN
206      return ctx->le_mode;
207 #else
208      return !ctx->le_mode;
209 #endif
210 }
211 
212 /* True when active word size < size of target_long.  */
213 #ifdef TARGET_PPC64
214 # define NARROW_MODE(C)  (!(C)->sf_mode)
215 #else
216 # define NARROW_MODE(C)  0
217 #endif
218 
219 struct opc_handler_t {
220     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
221     uint32_t inval1;
222     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
223     uint32_t inval2;
224     /* instruction type */
225     uint64_t type;
226     /* extended instruction type */
227     uint64_t type2;
228     /* handler */
229     void (*handler)(DisasContext *ctx);
230 };
231 
232 /* SPR load/store helpers */
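/*
 * These access env->spr[] directly and have no side effects; SPRs that need
 * extra work on access are handled by the dedicated callbacks further down.
 */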
233 static inline void gen_load_spr(TCGv t, int reg)
234 {
235     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
236 }
237 
238 static inline void gen_store_spr(int reg, TCGv t)
239 {
240     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
241 }
242 
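/*
 * Update env->access_type lazily: the store is emitted only when the value
 * differs from the one last set in this translation block, and only on CPUs
 * that need the access type at all.
 */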
243 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
244 {
245     if (ctx->need_access_type && ctx->access_type != access_type) {
246         tcg_gen_movi_i32(cpu_access_type, access_type);
247         ctx->access_type = access_type;
248     }
249 }
250 
251 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
252 {
253     if (NARROW_MODE(ctx)) {
254         nip = (uint32_t)nip;
255     }
256     tcg_gen_movi_tl(cpu_nip, nip);
257 }
258 
259 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
260 {
261     TCGv_i32 t0, t1;
262 
263     /*
264      * These are all synchronous exceptions; we set the PC back to the
265      * faulting instruction
266      */
267     gen_update_nip(ctx, ctx->cia);
268     t0 = tcg_constant_i32(excp);
269     t1 = tcg_constant_i32(error);
270     gen_helper_raise_exception_err(cpu_env, t0, t1);
271     ctx->base.is_jmp = DISAS_NORETURN;
272 }
273 
274 static void gen_exception(DisasContext *ctx, uint32_t excp)
275 {
276     TCGv_i32 t0;
277 
278     /*
279      * These are all synchronous exceptions; we set the PC back to the
280      * faulting instruction
281      */
282     gen_update_nip(ctx, ctx->cia);
283     t0 = tcg_constant_i32(excp);
284     gen_helper_raise_exception(cpu_env, t0);
285     ctx->base.is_jmp = DISAS_NORETURN;
286 }
287 
288 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
289                               target_ulong nip)
290 {
291     TCGv_i32 t0;
292 
293     gen_update_nip(ctx, nip);
294     t0 = tcg_constant_i32(excp);
295     gen_helper_raise_exception(cpu_env, t0);
296     ctx->base.is_jmp = DISAS_NORETURN;
297 }
298 
299 #if !defined(CONFIG_USER_ONLY)
300 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
301 {
302     translator_io_start(&ctx->base);
303     gen_helper_ppc_maybe_interrupt(cpu_env);
304 }
305 #endif
306 
307 /*
308  * Tells the caller which exception to generate and prepares the SPR
309  * registers for that exception.
310  *
311  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
312  * POWERPC_EXCP_DEBUG (on BookE).
313  */
314 static uint32_t gen_prep_dbgex(DisasContext *ctx)
315 {
316     if (ctx->flags & POWERPC_FLAG_DE) {
317         target_ulong dbsr = 0;
318         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
319             dbsr = DBCR0_ICMP;
320         } else {
321             /* Must have been a branch */
322             dbsr = DBCR0_BRT;
323         }
324         TCGv t0 = tcg_temp_new();
325         gen_load_spr(t0, SPR_BOOKE_DBSR);
326         tcg_gen_ori_tl(t0, t0, dbsr);
327         gen_store_spr(SPR_BOOKE_DBSR, t0);
328         return POWERPC_EXCP_DEBUG;
329     } else {
330         return POWERPC_EXCP_TRACE;
331     }
332 }
333 
334 static void gen_debug_exception(DisasContext *ctx)
335 {
336     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
337     ctx->base.is_jmp = DISAS_NORETURN;
338 }
339 
340 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
341 {
342     /* Will be converted to program check if needed */
343     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
344 }
345 
346 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
347 {
348     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
349 }
350 
351 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
352 {
353     /* Will be converted to program check if needed */
354     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
355 }
356 
357 /*****************************************************************************/
358 /* SPR READ/WRITE CALLBACKS */
359 
360 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
361 {
362 #if 0
363     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
364     printf("ERROR: try to access SPR %d !\n", sprn);
365 #endif
366 }
367 
368 /* #define PPC_DUMP_SPR_ACCESSES */
369 
370 /*
371  * Generic callbacks:
372  * do nothing but store/retrieve the SPR value
373  */
374 static void spr_load_dump_spr(int sprn)
375 {
376 #ifdef PPC_DUMP_SPR_ACCESSES
377     TCGv_i32 t0 = tcg_constant_i32(sprn);
378     gen_helper_load_dump_spr(cpu_env, t0);
379 #endif
380 }
381 
382 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
383 {
384     gen_load_spr(cpu_gpr[gprn], sprn);
385     spr_load_dump_spr(sprn);
386 }
387 
388 static void spr_store_dump_spr(int sprn)
389 {
390 #ifdef PPC_DUMP_SPR_ACCESSES
391     TCGv_i32 t0 = tcg_constant_i32(sprn);
392     gen_helper_store_dump_spr(cpu_env, t0);
393 #endif
394 }
395 
396 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
397 {
398     gen_store_spr(sprn, cpu_gpr[gprn]);
399     spr_store_dump_spr(sprn);
400 }
401 
402 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
403 {
404 #ifdef TARGET_PPC64
405     TCGv t0 = tcg_temp_new();
406     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
407     gen_store_spr(sprn, t0);
408     spr_store_dump_spr(sprn);
409 #else
410     spr_write_generic(ctx, sprn, gprn);
411 #endif
412 }
413 
414 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
415 {
416     spr_write_generic32(ctx, sprn, gprn);
417 
418     /*
419      * SPR_CTRL writes must force a new translation block,
420      * allowing the PMU to calculate the run latch events with
421      * more accuracy.
422      */
423     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
424 }
425 
426 #if !defined(CONFIG_USER_ONLY)
427 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
428 {
429     TCGv t0 = tcg_temp_new();
430     TCGv t1 = tcg_temp_new();
431     gen_load_spr(t0, sprn);
432     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
433     tcg_gen_and_tl(t0, t0, t1);
434     gen_store_spr(sprn, t0);
435 }
436 
437 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
438 {
439 }
440 
441 #endif
442 
443 /* SPR common to all PowerPC */
444 /* XER */
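/*
 * XER is kept split across the cpu_xer, cpu_so, cpu_ov, cpu_ca (and, on ISA
 * v3.00, cpu_ov32/cpu_ca32) globals; the read callback reassembles the
 * architected value and the write callback scatters the bits back out.
 */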
445 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
446 {
447     TCGv dst = cpu_gpr[gprn];
448     TCGv t0 = tcg_temp_new();
449     TCGv t1 = tcg_temp_new();
450     TCGv t2 = tcg_temp_new();
451     tcg_gen_mov_tl(dst, cpu_xer);
452     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
453     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
454     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
455     tcg_gen_or_tl(t0, t0, t1);
456     tcg_gen_or_tl(dst, dst, t2);
457     tcg_gen_or_tl(dst, dst, t0);
458     if (is_isa300(ctx)) {
459         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
460         tcg_gen_or_tl(dst, dst, t0);
461         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
462         tcg_gen_or_tl(dst, dst, t0);
463     }
464 }
465 
466 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
467 {
468     TCGv src = cpu_gpr[gprn];
469     /* Write all flag bits; isa300-only bits are filtered out on read */
470     tcg_gen_andi_tl(cpu_xer, src,
471                     ~((1u << XER_SO) |
472                       (1u << XER_OV) | (1u << XER_OV32) |
473                       (1u << XER_CA) | (1u << XER_CA32)));
474     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
475     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
476     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
477     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
478     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
479 }
480 
481 /* LR */
482 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
483 {
484     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
485 }
486 
487 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
488 {
489     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
490 }
491 
492 /* CFAR */
493 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
494 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
495 {
496     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
497 }
498 
499 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
500 {
501     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
502 }
503 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
504 
505 /* CTR */
506 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
507 {
508     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
509 }
510 
511 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
512 {
513     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
514 }
515 
516 /* User read access to SPR */
517 /* USPRx */
518 /* UMMCRx */
519 /* UPMCx */
520 /* USIA */
521 /* UDECR */
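/*
 * The user-mode SPR numbers are 16 below their privileged counterparts, so
 * the access is simply redirected to sprn + 0x10.
 */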
522 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
523 {
524     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
525 }
526 
527 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
528 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
529 {
530     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
531 }
532 #endif
533 
534 /* SPR common to all non-embedded PowerPC */
535 /* DECR */
536 #if !defined(CONFIG_USER_ONLY)
537 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
538 {
539     translator_io_start(&ctx->base);
540     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
541 }
542 
543 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
544 {
545     translator_io_start(&ctx->base);
546     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
547 }
548 #endif
549 
550 /* SPR common to all non-embedded PowerPC, except 601 */
551 /* Time base */
552 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
553 {
554     translator_io_start(&ctx->base);
555     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
556 }
557 
558 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
559 {
560     translator_io_start(&ctx->base);
561     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
562 }
563 
564 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
565 {
566     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
567 }
568 
569 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
570 {
571     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
572 }
573 
574 #if !defined(CONFIG_USER_ONLY)
575 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
576 {
577     translator_io_start(&ctx->base);
578     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
579 }
580 
581 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
582 {
583     translator_io_start(&ctx->base);
584     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
585 }
586 
587 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
588 {
589     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
590 }
591 
592 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
593 {
594     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
595 }
596 
597 #if defined(TARGET_PPC64)
598 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
599 {
600     translator_io_start(&ctx->base);
601     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
602 }
603 
604 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
605 {
606     translator_io_start(&ctx->base);
607     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
608 }
609 
610 /* HDECR */
611 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
612 {
613     translator_io_start(&ctx->base);
614     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
615 }
616 
617 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
618 {
619     translator_io_start(&ctx->base);
620     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
621 }
622 
623 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
624 {
625     translator_io_start(&ctx->base);
626     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
627 }
628 
629 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
630 {
631     translator_io_start(&ctx->base);
632     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
633 }
634 
635 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
636 {
637     translator_io_start(&ctx->base);
638     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
639 }
640 
641 #endif
642 #endif
643 
644 #if !defined(CONFIG_USER_ONLY)
645 /* IBAT0U...IBAT7U */
646 /* IBAT0L...IBAT7L */
647 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
648 {
649     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
650                   offsetof(CPUPPCState,
651                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
652 }
653 
654 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
655 {
656     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
657                   offsetof(CPUPPCState,
658                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
659 }
660 
661 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
662 {
663     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
664     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
665 }
666 
667 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
668 {
669     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
670     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
671 }
672 
673 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
674 {
675     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
676     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
677 }
678 
679 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
680 {
681     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
682     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
683 }
684 
685 /* DBAT0U...DBAT7U */
686 /* DBAT0L...DBAT7L */
687 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
688 {
689     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
690                   offsetof(CPUPPCState,
691                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
692 }
693 
694 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
695 {
696     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
697                   offsetof(CPUPPCState,
698                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
699 }
700 
701 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
702 {
703     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
704     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
705 }
706 
707 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
708 {
709     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
710     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
711 }
712 
713 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
714 {
715     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
716     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
717 }
718 
719 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
720 {
721     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
722     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
723 }
724 
725 /* SDR1 */
726 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
727 {
728     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
729 }
730 
731 #if defined(TARGET_PPC64)
732 /* 64-bit PowerPC specific SPRs */
733 /* PIDR */
734 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
735 {
736     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
737 }
738 
739 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
740 {
741     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
742 }
743 
744 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
745 {
746     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
747 }
748 
749 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
750 {
751     TCGv t0 = tcg_temp_new();
752     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
753     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
754 }
755 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
756 {
757     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
758 }
759 
760 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
761 {
762     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
763 }
764 
765 /* DPDES */
766 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
767 {
768     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
769 }
770 
771 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
772 {
773     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
774 }
775 #endif
776 #endif
777 
778 /* PowerPC 40x specific registers */
779 #if !defined(CONFIG_USER_ONLY)
780 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
781 {
782     translator_io_start(&ctx->base);
783     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
784 }
785 
786 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
787 {
788     translator_io_start(&ctx->base);
789     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
790 }
791 
792 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
793 {
794     translator_io_start(&ctx->base);
795     gen_store_spr(sprn, cpu_gpr[gprn]);
796     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
797     /* We must stop translation as we may have rebooted */
798     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
799 }
800 
801 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
802 {
803     translator_io_start(&ctx->base);
804     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
805 }
806 
807 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
808 {
809     translator_io_start(&ctx->base);
810     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
811 }
812 
813 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
814 {
815     translator_io_start(&ctx->base);
816     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
817 }
818 
819 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
820 {
821     TCGv t0 = tcg_temp_new();
822     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
823     gen_helper_store_40x_pid(cpu_env, t0);
824 }
825 
826 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
827 {
828     translator_io_start(&ctx->base);
829     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
830 }
831 
832 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
833 {
834     translator_io_start(&ctx->base);
835     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
836 }
837 #endif
838 
839 /* PIR */
840 #if !defined(CONFIG_USER_ONLY)
841 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
842 {
843     TCGv t0 = tcg_temp_new();
844     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
845     gen_store_spr(SPR_PIR, t0);
846 }
847 #endif
848 
849 /* SPE specific registers */
850 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
851 {
852     TCGv_i32 t0 = tcg_temp_new_i32();
853     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
854     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
855 }
856 
857 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
858 {
859     TCGv_i32 t0 = tcg_temp_new_i32();
860     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
861     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
862 }
863 
864 #if !defined(CONFIG_USER_ONLY)
865 /* Callback used to write the exception vector base */
866 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
867 {
868     TCGv t0 = tcg_temp_new();
869     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
870     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
871     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
872     gen_store_spr(sprn, t0);
873 }
874 
875 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
876 {
877     int sprn_offs;
878 
879     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
880         sprn_offs = sprn - SPR_BOOKE_IVOR0;
881     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
882         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
883     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
884         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
885     } else {
886         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
887                       " vector 0x%03x\n", sprn);
888         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
889         return;
890     }
891 
892     TCGv t0 = tcg_temp_new();
893     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
894     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
895     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
896     gen_store_spr(sprn, t0);
897 }
898 #endif
899 
900 #ifdef TARGET_PPC64
901 #ifndef CONFIG_USER_ONLY
902 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
903 {
904     TCGv t0 = tcg_temp_new();
905     TCGv t1 = tcg_temp_new();
906     TCGv t2 = tcg_temp_new();
907 
908     /*
909      * Note, the HV=1 PR=0 case is handled earlier by simply using
910      * spr_write_generic for HV mode in the SPR table
911      */
912 
913     /* Build insertion mask into t1 based on context */
914     if (ctx->pr) {
915         gen_load_spr(t1, SPR_UAMOR);
916     } else {
917         gen_load_spr(t1, SPR_AMOR);
918     }
919 
920     /* Mask new bits into t2 */
921     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
922 
923     /* Load AMR and clear new bits in t0 */
924     gen_load_spr(t0, SPR_AMR);
925     tcg_gen_andc_tl(t0, t0, t1);
926 
927     /* OR in the new bits and write it out */
928     tcg_gen_or_tl(t0, t0, t2);
929     gen_store_spr(SPR_AMR, t0);
930     spr_store_dump_spr(SPR_AMR);
931 }
932 
933 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
934 {
935     TCGv t0 = tcg_temp_new();
936     TCGv t1 = tcg_temp_new();
937     TCGv t2 = tcg_temp_new();
938 
939     /*
940      * Note, the HV=1 case is handled earlier by simply using
941      * spr_write_generic for HV mode in the SPR table
942      */
943 
944     /* Build insertion mask into t1 based on context */
945     gen_load_spr(t1, SPR_AMOR);
946 
947     /* Mask new bits into t2 */
948     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
949 
950     /* Load AMR and clear new bits in t0 */
951     gen_load_spr(t0, SPR_UAMOR);
952     tcg_gen_andc_tl(t0, t0, t1);
953 
954     /* OR in the new bits and write it out */
955     tcg_gen_or_tl(t0, t0, t2);
956     gen_store_spr(SPR_UAMOR, t0);
957     spr_store_dump_spr(SPR_UAMOR);
958 }
959 
960 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
961 {
962     TCGv t0 = tcg_temp_new();
963     TCGv t1 = tcg_temp_new();
964     TCGv t2 = tcg_temp_new();
965 
966     /*
967      * Note, the HV=1 case is handled earlier by simply using
968      * spr_write_generic for HV mode in the SPR table
969      */
970 
971     /* Build insertion mask into t1 based on context */
972     gen_load_spr(t1, SPR_AMOR);
973 
974     /* Mask new bits into t2 */
975     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
976 
977     /* Load AMR and clear new bits in t0 */
978     gen_load_spr(t0, SPR_IAMR);
979     tcg_gen_andc_tl(t0, t0, t1);
980 
981     /* OR in the new bits and write it out */
982     tcg_gen_or_tl(t0, t0, t2);
983     gen_store_spr(SPR_IAMR, t0);
984     spr_store_dump_spr(SPR_IAMR);
985 }
986 #endif
987 #endif
988 
989 #ifndef CONFIG_USER_ONLY
990 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
991 {
992     gen_helper_fixup_thrm(cpu_env);
993     gen_load_spr(cpu_gpr[gprn], sprn);
994     spr_load_dump_spr(sprn);
995 }
996 #endif /* !CONFIG_USER_ONLY */
997 
998 #if !defined(CONFIG_USER_ONLY)
999 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1000 {
1001     TCGv t0 = tcg_temp_new();
1002 
1003     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1004     gen_store_spr(sprn, t0);
1005 }
1006 
1007 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1008 {
1009     TCGv t0 = tcg_temp_new();
1010 
1011     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1012     gen_store_spr(sprn, t0);
1013 }
1014 
1015 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1016 {
1017     TCGv t0 = tcg_temp_new();
1018 
1019     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1020                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1021     gen_store_spr(sprn, t0);
1022 }
1023 
1024 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1025 {
1026     gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1027 }
1028 
1029 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1030 {
1031     TCGv_i32 t0 = tcg_constant_i32(sprn);
1032     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1033 }
1034 
1035 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1036 {
1037     gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1038 }
1039 
1040 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1041 {
1042     gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1043 }
1044 
1045 #endif
1046 
1047 #if !defined(CONFIG_USER_ONLY)
1048 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1049 {
1050     TCGv val = tcg_temp_new();
1051     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1052     gen_store_spr(SPR_BOOKE_MAS3, val);
1053     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1054     gen_store_spr(SPR_BOOKE_MAS7, val);
1055 }
1056 
1057 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1058 {
1059     TCGv mas7 = tcg_temp_new();
1060     TCGv mas3 = tcg_temp_new();
1061     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1062     tcg_gen_shli_tl(mas7, mas7, 32);
1063     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1064     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1065 }
1066 
1067 #endif
1068 
1069 #ifdef TARGET_PPC64
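/*
 * Emit a facility-availability check: the helper raises the appropriate
 * interrupt when the given FSCR (or MSR) bit is clear, recording the SPR
 * number and interrupt cause passed here.
 */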
1070 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1071                                     int bit, int sprn, int cause)
1072 {
1073     TCGv_i32 t1 = tcg_constant_i32(bit);
1074     TCGv_i32 t2 = tcg_constant_i32(sprn);
1075     TCGv_i32 t3 = tcg_constant_i32(cause);
1076 
1077     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1078 }
1079 
1080 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1081                                    int bit, int sprn, int cause)
1082 {
1083     TCGv_i32 t1 = tcg_constant_i32(bit);
1084     TCGv_i32 t2 = tcg_constant_i32(sprn);
1085     TCGv_i32 t3 = tcg_constant_i32(cause);
1086 
1087     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1088 }
1089 
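/*
 * Access the upper 32 bits of the SPR one number below sprn; this implements
 * the 32-bit "upper half" views of 64-bit SPRs such as the TM and EBB
 * registers handled below.
 */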
1090 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1091 {
1092     TCGv spr_up = tcg_temp_new();
1093     TCGv spr = tcg_temp_new();
1094 
1095     gen_load_spr(spr, sprn - 1);
1096     tcg_gen_shri_tl(spr_up, spr, 32);
1097     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1098 }
1099 
1100 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1101 {
1102     TCGv spr = tcg_temp_new();
1103 
1104     gen_load_spr(spr, sprn - 1);
1105     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1106     gen_store_spr(sprn - 1, spr);
1107 }
1108 
1109 #if !defined(CONFIG_USER_ONLY)
1110 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1111 {
1112     TCGv hmer = tcg_temp_new();
1113 
1114     gen_load_spr(hmer, sprn);
1115     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1116     gen_store_spr(sprn, hmer);
1117     spr_store_dump_spr(sprn);
1118 }
1119 
1120 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1121 {
1122     gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1123 }
1124 #endif /* !defined(CONFIG_USER_ONLY) */
1125 
1126 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1127 {
1128     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1129     spr_read_generic(ctx, gprn, sprn);
1130 }
1131 
1132 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1133 {
1134     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1135     spr_write_generic(ctx, sprn, gprn);
1136 }
1137 
1138 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1139 {
1140     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1141     spr_read_generic(ctx, gprn, sprn);
1142 }
1143 
1144 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1145 {
1146     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1147     spr_write_generic(ctx, sprn, gprn);
1148 }
1149 
1150 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1151 {
1152     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1153     spr_read_prev_upper32(ctx, gprn, sprn);
1154 }
1155 
1156 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1157 {
1158     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1159     spr_write_prev_upper32(ctx, sprn, gprn);
1160 }
1161 
1162 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1163 {
1164     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1165     spr_read_generic(ctx, gprn, sprn);
1166 }
1167 
1168 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1169 {
1170     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1171     spr_write_generic(ctx, sprn, gprn);
1172 }
1173 
1174 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1175 {
1176     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1177     spr_read_prev_upper32(ctx, gprn, sprn);
1178 }
1179 
1180 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1181 {
1182     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1183     spr_write_prev_upper32(ctx, sprn, gprn);
1184 }
1185 
1186 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1187 {
1188     TCGv t0 = tcg_temp_new();
1189 
1190     /*
1191      * Access to the (H)DEXCR in problem state is done through separate
1192      * SPR indexes, 16 below the SPR indexes that have full access to the
1193      * (H)DEXCR in privileged state. Problem state can only read
1194      * bits 32:63; bits 0:31 return 0.
1195      *
1196      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1197      */
1198 
1199     gen_load_spr(t0, sprn + 16);
1200     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1201 }
1202 #endif
1203 
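/*
 * The GEN_HANDLER* macros build opcode_t table entries (see the GEN_OPCODE*
 * definitions below); the variants differ in whether an explicit name, a
 * fourth opcode field or an extended type is supplied.
 */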
1204 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1205 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1206 
1207 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1208 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1209 
1210 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1211 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1212 
1213 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1214 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1215 
1216 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1217 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1218 
1219 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1220 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1221 
1222 typedef struct opcode_t {
1223     unsigned char opc1, opc2, opc3, opc4;
1224 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1225     unsigned char pad[4];
1226 #endif
1227     opc_handler_t handler;
1228     const char *oname;
1229 } opcode_t;
1230 
1231 static void gen_priv_opc(DisasContext *ctx)
1232 {
1233     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1234 }
1235 
1236 /* Helpers for priv. check */
1237 #define GEN_PRIV(CTX)              \
1238     do {                           \
1239         gen_priv_opc(CTX); return; \
1240     } while (0)
1241 
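/*
 * CHK_SV rejects the instruction in problem state (ctx->pr), CHK_HV also
 * requires hypervisor state (ctx->hv), and CHK_HVRM additionally requires
 * data relocation to be off (ctx->dr clear). In user-only builds every
 * check fails.
 */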
1242 #if defined(CONFIG_USER_ONLY)
1243 #define CHK_HV(CTX) GEN_PRIV(CTX)
1244 #define CHK_SV(CTX) GEN_PRIV(CTX)
1245 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1246 #else
1247 #define CHK_HV(CTX)                         \
1248     do {                                    \
1249         if (unlikely(ctx->pr || !ctx->hv)) {\
1250             GEN_PRIV(CTX);                  \
1251         }                                   \
1252     } while (0)
1253 #define CHK_SV(CTX)              \
1254     do {                         \
1255         if (unlikely(ctx->pr)) { \
1256             GEN_PRIV(CTX);       \
1257         }                        \
1258     } while (0)
1259 #define CHK_HVRM(CTX)                                   \
1260     do {                                                \
1261         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1262             GEN_PRIV(CTX);                              \
1263         }                                               \
1264     } while (0)
1265 #endif
1266 
1267 #define CHK_NONE(CTX)
1268 
1269 /*****************************************************************************/
1270 /* PowerPC instructions table                                                */
1271 
1272 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1273 {                                                                             \
1274     .opc1 = op1,                                                              \
1275     .opc2 = op2,                                                              \
1276     .opc3 = op3,                                                              \
1277     .opc4 = 0xff,                                                             \
1278     .handler = {                                                              \
1279         .inval1  = invl,                                                      \
1280         .type = _typ,                                                         \
1281         .type2 = _typ2,                                                       \
1282         .handler = &gen_##name,                                               \
1283     },                                                                        \
1284     .oname = stringify(name),                                                 \
1285 }
1286 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1287 {                                                                             \
1288     .opc1 = op1,                                                              \
1289     .opc2 = op2,                                                              \
1290     .opc3 = op3,                                                              \
1291     .opc4 = 0xff,                                                             \
1292     .handler = {                                                              \
1293         .inval1  = invl1,                                                     \
1294         .inval2  = invl2,                                                     \
1295         .type = _typ,                                                         \
1296         .type2 = _typ2,                                                       \
1297         .handler = &gen_##name,                                               \
1298     },                                                                        \
1299     .oname = stringify(name),                                                 \
1300 }
1301 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1302 {                                                                             \
1303     .opc1 = op1,                                                              \
1304     .opc2 = op2,                                                              \
1305     .opc3 = op3,                                                              \
1306     .opc4 = 0xff,                                                             \
1307     .handler = {                                                              \
1308         .inval1  = invl,                                                      \
1309         .type = _typ,                                                         \
1310         .type2 = _typ2,                                                       \
1311         .handler = &gen_##name,                                               \
1312     },                                                                        \
1313     .oname = onam,                                                            \
1314 }
1315 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1316 {                                                                             \
1317     .opc1 = op1,                                                              \
1318     .opc2 = op2,                                                              \
1319     .opc3 = op3,                                                              \
1320     .opc4 = op4,                                                              \
1321     .handler = {                                                              \
1322         .inval1  = invl,                                                      \
1323         .type = _typ,                                                         \
1324         .type2 = _typ2,                                                       \
1325         .handler = &gen_##name,                                               \
1326     },                                                                        \
1327     .oname = stringify(name),                                                 \
1328 }
1329 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1330 {                                                                             \
1331     .opc1 = op1,                                                              \
1332     .opc2 = op2,                                                              \
1333     .opc3 = op3,                                                              \
1334     .opc4 = op4,                                                              \
1335     .handler = {                                                              \
1336         .inval1  = invl,                                                      \
1337         .type = _typ,                                                         \
1338         .type2 = _typ2,                                                       \
1339         .handler = &gen_##name,                                               \
1340     },                                                                        \
1341     .oname = onam,                                                            \
1342 }
1343 
1344 /* Invalid instruction */
1345 static void gen_invalid(DisasContext *ctx)
1346 {
1347     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1348 }
1349 
1350 static opc_handler_t invalid_handler = {
1351     .inval1  = 0xFFFFFFFF,
1352     .inval2  = 0xFFFFFFFF,
1353     .type    = PPC_NONE,
1354     .type2   = PPC_NONE,
1355     .handler = gen_invalid,
1356 };
1357 
1358 /***                           Integer comparison                          ***/
1359 
1360 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
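/*
 * Set CR[crf] for a signed (s != 0) or unsigned comparison: start from EQ,
 * overwrite it with LT or GT via movcond, then OR in the current SO bit.
 */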
1361 {
1362     TCGv t0 = tcg_temp_new();
1363     TCGv t1 = tcg_temp_new();
1364     TCGv_i32 t = tcg_temp_new_i32();
1365 
1366     tcg_gen_movi_tl(t0, CRF_EQ);
1367     tcg_gen_movi_tl(t1, CRF_LT);
1368     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1369                        t0, arg0, arg1, t1, t0);
1370     tcg_gen_movi_tl(t1, CRF_GT);
1371     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1372                        t0, arg0, arg1, t1, t0);
1373 
1374     tcg_gen_trunc_tl_i32(t, t0);
1375     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1376     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1377 }
1378 
1379 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1380 {
1381     TCGv t0 = tcg_constant_tl(arg1);
1382     gen_op_cmp(arg0, t0, s, crf);
1383 }
1384 
1385 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1386 {
1387     TCGv t0, t1;
1388     t0 = tcg_temp_new();
1389     t1 = tcg_temp_new();
1390     if (s) {
1391         tcg_gen_ext32s_tl(t0, arg0);
1392         tcg_gen_ext32s_tl(t1, arg1);
1393     } else {
1394         tcg_gen_ext32u_tl(t0, arg0);
1395         tcg_gen_ext32u_tl(t1, arg1);
1396     }
1397     gen_op_cmp(t0, t1, s, crf);
1398 }
1399 
1400 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1401 {
1402     TCGv t0 = tcg_constant_tl(arg1);
1403     gen_op_cmp32(arg0, t0, s, crf);
1404 }
1405 
1406 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1407 {
1408     if (NARROW_MODE(ctx)) {
1409         gen_op_cmpi32(reg, 0, 1, 0);
1410     } else {
1411         gen_op_cmpi(reg, 0, 1, 0);
1412     }
1413 }
1414 
1415 /* cmprb - range comparison: isupper, isalpha, islower */
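/*
 * The low byte of rA is compared against a range formed from the two low
 * bytes of rB; when opcode bit 0x00200000 (the L field) is set, a second
 * range taken from the next two bytes of rB is checked as well.
 */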
1416 static void gen_cmprb(DisasContext *ctx)
1417 {
1418     TCGv_i32 src1 = tcg_temp_new_i32();
1419     TCGv_i32 src2 = tcg_temp_new_i32();
1420     TCGv_i32 src2lo = tcg_temp_new_i32();
1421     TCGv_i32 src2hi = tcg_temp_new_i32();
1422     TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1423 
1424     tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1425     tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1426 
1427     tcg_gen_andi_i32(src1, src1, 0xFF);
1428     tcg_gen_ext8u_i32(src2lo, src2);
1429     tcg_gen_shri_i32(src2, src2, 8);
1430     tcg_gen_ext8u_i32(src2hi, src2);
1431 
1432     tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1433     tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1434     tcg_gen_and_i32(crf, src2lo, src2hi);
1435 
1436     if (ctx->opcode & 0x00200000) {
1437         tcg_gen_shri_i32(src2, src2, 8);
1438         tcg_gen_ext8u_i32(src2lo, src2);
1439         tcg_gen_shri_i32(src2, src2, 8);
1440         tcg_gen_ext8u_i32(src2hi, src2);
1441         tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1442         tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1443         tcg_gen_and_i32(src2lo, src2lo, src2hi);
1444         tcg_gen_or_i32(crf, crf, src2lo);
1445     }
1446     tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1447 }
1448 
1449 #if defined(TARGET_PPC64)
1450 /* cmpeqb */
1451 static void gen_cmpeqb(DisasContext *ctx)
1452 {
1453     gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1454                       cpu_gpr[rB(ctx->opcode)]);
1455 }
1456 #endif
1457 
1458 /* isel (PowerPC 2.03 specification) */
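/*
 * rD gets rA (or zero when rA is 0) if the CR bit selected by BC is set,
 * otherwise rB.
 */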
1459 static void gen_isel(DisasContext *ctx)
1460 {
1461     uint32_t bi = rC(ctx->opcode);
1462     uint32_t mask = 0x08 >> (bi & 0x03);
1463     TCGv t0 = tcg_temp_new();
1464     TCGv zr;
1465 
1466     tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1467     tcg_gen_andi_tl(t0, t0, mask);
1468 
1469     zr = tcg_constant_tl(0);
1470     tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1471                        rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1472                        cpu_gpr[rB(ctx->opcode)]);
1473 }
1474 
1475 /* cmpb: PowerPC 2.05 specification */
1476 static void gen_cmpb(DisasContext *ctx)
1477 {
1478     gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1479                     cpu_gpr[rB(ctx->opcode)]);
1480 }
1481 
1482 /***                           Integer arithmetic                          ***/
1483 
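/*
 * Compute XER[OV] (and OV32 on ISA v3.00) for an addition (sub == 0) or
 * subtraction (sub == 1) whose result is in arg0, and accumulate it into SO.
 */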
1484 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1485                                            TCGv arg1, TCGv arg2, int sub)
1486 {
1487     TCGv t0 = tcg_temp_new();
1488 
1489     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1490     tcg_gen_xor_tl(t0, arg1, arg2);
1491     if (sub) {
1492         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1493     } else {
1494         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1495     }
1496     if (NARROW_MODE(ctx)) {
1497         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1498         if (is_isa300(ctx)) {
1499             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1500         }
1501     } else {
1502         if (is_isa300(ctx)) {
1503             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1504         }
1505         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1506     }
1507     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1508 }
1509 
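/*
 * XORing the operands (EQV for subtraction) with the result exposes the
 * per-bit carry-ins; bit 32 of that value is the carry out of the low word,
 * which is what CA32 reports.
 */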
1510 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1511                                              TCGv res, TCGv arg0, TCGv arg1,
1512                                              TCGv ca32, int sub)
1513 {
1514     TCGv t0;
1515 
1516     if (!is_isa300(ctx)) {
1517         return;
1518     }
1519 
1520     t0 = tcg_temp_new();
1521     if (sub) {
1522         tcg_gen_eqv_tl(t0, arg0, arg1);
1523     } else {
1524         tcg_gen_xor_tl(t0, arg0, arg1);
1525     }
1526     tcg_gen_xor_tl(t0, t0, res);
1527     tcg_gen_extract_tl(ca32, t0, 32, 1);
1528 }
1529 
1530 /* Common add function */
1531 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1532                                     TCGv arg2, TCGv ca, TCGv ca32,
1533                                     bool add_ca, bool compute_ca,
1534                                     bool compute_ov, bool compute_rc0)
1535 {
1536     TCGv t0 = ret;
1537 
1538     if (compute_ca || compute_ov) {
1539         t0 = tcg_temp_new();
1540     }
1541 
1542     if (compute_ca) {
1543         if (NARROW_MODE(ctx)) {
1544             /*
1545              * Caution: a non-obvious corner case of the spec is that
1546              * we must produce the *entire* 64-bit addition, but
1547              * produce the carry into bit 32.
1548              */
1549             TCGv t1 = tcg_temp_new();
1550             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1551             tcg_gen_add_tl(t0, arg1, arg2);
1552             if (add_ca) {
1553                 tcg_gen_add_tl(t0, t0, ca);
1554             }
1555             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1556             tcg_gen_extract_tl(ca, ca, 32, 1);
1557             if (is_isa300(ctx)) {
1558                 tcg_gen_mov_tl(ca32, ca);
1559             }
1560         } else {
1561             TCGv zero = tcg_constant_tl(0);
1562             if (add_ca) {
1563                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1564                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1565             } else {
1566                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1567             }
1568             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1569         }
1570     } else {
1571         tcg_gen_add_tl(t0, arg1, arg2);
1572         if (add_ca) {
1573             tcg_gen_add_tl(t0, t0, ca);
1574         }
1575     }
1576 
1577     if (compute_ov) {
1578         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1579     }
1580     if (unlikely(compute_rc0)) {
1581         gen_set_Rc0(ctx, t0);
1582     }
1583 
1584     if (t0 != ret) {
1585         tcg_gen_mov_tl(ret, t0);
1586     }
1587 }
1588 /* Add functions with two operands */
1589 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1590 static void glue(gen_, name)(DisasContext *ctx)                               \
1591 {                                                                             \
1592     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1593                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1594                      ca, glue(ca, 32),                                        \
1595                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1596 }
1597 /* Add functions with one operand and one immediate */
1598 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1599                                 add_ca, compute_ca, compute_ov)               \
1600 static void glue(gen_, name)(DisasContext *ctx)                               \
1601 {                                                                             \
1602     TCGv t0 = tcg_constant_tl(const_val);                                     \
1603     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1604                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1605                      ca, glue(ca, 32),                                        \
1606                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1607 }
1608 
1609 /* add  add.  addo  addo. */
1610 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1611 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1612 /* addc  addc.  addco  addco. */
1613 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1614 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1615 /* adde  adde.  addeo  addeo. */
1616 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1617 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1618 /* addme  addme.  addmeo  addmeo.  */
1619 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1620 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1621 /* addex */
1622 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1623 /* addze  addze.  addzeo  addzeo. */
1624 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1625 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1626 /* addic  addic. */
1627 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1628 {
1629     TCGv c = tcg_constant_tl(SIMM(ctx->opcode));
1630     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1631                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1632 }
1633 
1634 static void gen_addic(DisasContext *ctx)
1635 {
1636     gen_op_addic(ctx, 0);
1637 }
1638 
1639 static void gen_addic_(DisasContext *ctx)
1640 {
1641     gen_op_addic(ctx, 1);
1642 }
1643 
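/*
 * Integer division guard used below: t2 is set to 1 for the operand
 * combinations that would trap on the host (division by zero, and
 * INT_MIN / -1 for the signed form); the movcond then forces the
 * divisor to 1 so the TCG division is always safe, and t2 doubles as
 * the overflow flag copied into OV/OV32 when requested.
 */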
1644 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1645                                      TCGv arg2, int sign, int compute_ov)
1646 {
1647     TCGv_i32 t0 = tcg_temp_new_i32();
1648     TCGv_i32 t1 = tcg_temp_new_i32();
1649     TCGv_i32 t2 = tcg_temp_new_i32();
1650     TCGv_i32 t3 = tcg_temp_new_i32();
1651 
1652     tcg_gen_trunc_tl_i32(t0, arg1);
1653     tcg_gen_trunc_tl_i32(t1, arg2);
1654     if (sign) {
1655         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1656         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1657         tcg_gen_and_i32(t2, t2, t3);
1658         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1659         tcg_gen_or_i32(t2, t2, t3);
1660         tcg_gen_movi_i32(t3, 0);
1661         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1662         tcg_gen_div_i32(t3, t0, t1);
1663         tcg_gen_extu_i32_tl(ret, t3);
1664     } else {
1665         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1666         tcg_gen_movi_i32(t3, 0);
1667         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1668         tcg_gen_divu_i32(t3, t0, t1);
1669         tcg_gen_extu_i32_tl(ret, t3);
1670     }
1671     if (compute_ov) {
1672         tcg_gen_extu_i32_tl(cpu_ov, t2);
1673         if (is_isa300(ctx)) {
1674             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1675         }
1676         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1677     }
1678 
1679     if (unlikely(Rc(ctx->opcode) != 0)) {
1680         gen_set_Rc0(ctx, ret);
1681     }
1682 }
1683 /* Div functions */
1684 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1685 static void glue(gen_, name)(DisasContext *ctx)                               \
1686 {                                                                             \
1687     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1688                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1689                      sign, compute_ov);                                       \
1690 }
1691 /* divwu  divwu.  divwuo  divwuo.   */
1692 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1693 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1694 /* divw  divw.  divwo  divwo.   */
1695 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1696 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1697 
1698 /* div[wd]eu[o][.] */
1699 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1700 static void gen_##name(DisasContext *ctx)                                     \
1701 {                                                                             \
1702     TCGv_i32 t0 = tcg_constant_i32(compute_ov);                               \
1703     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1704                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1705     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1706         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1707     }                                                                         \
1708 }
1709 
1710 GEN_DIVE(divweu, divweu, 0);
1711 GEN_DIVE(divweuo, divweu, 1);
1712 GEN_DIVE(divwe, divwe, 0);
1713 GEN_DIVE(divweo, divwe, 1);
1714 
1715 #if defined(TARGET_PPC64)
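/*
 * 64-bit counterpart of gen_op_arith_divw above: the same
 * flag-and-movcond guard avoids host traps on division by zero and
 * INT64_MIN / -1.
 */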
1716 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1717                                      TCGv arg2, int sign, int compute_ov)
1718 {
1719     TCGv_i64 t0 = tcg_temp_new_i64();
1720     TCGv_i64 t1 = tcg_temp_new_i64();
1721     TCGv_i64 t2 = tcg_temp_new_i64();
1722     TCGv_i64 t3 = tcg_temp_new_i64();
1723 
1724     tcg_gen_mov_i64(t0, arg1);
1725     tcg_gen_mov_i64(t1, arg2);
1726     if (sign) {
1727         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1728         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1729         tcg_gen_and_i64(t2, t2, t3);
1730         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1731         tcg_gen_or_i64(t2, t2, t3);
1732         tcg_gen_movi_i64(t3, 0);
1733         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1734         tcg_gen_div_i64(ret, t0, t1);
1735     } else {
1736         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1737         tcg_gen_movi_i64(t3, 0);
1738         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1739         tcg_gen_divu_i64(ret, t0, t1);
1740     }
1741     if (compute_ov) {
1742         tcg_gen_mov_tl(cpu_ov, t2);
1743         if (is_isa300(ctx)) {
1744             tcg_gen_mov_tl(cpu_ov32, t2);
1745         }
1746         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1747     }
1748 
1749     if (unlikely(Rc(ctx->opcode) != 0)) {
1750         gen_set_Rc0(ctx, ret);
1751     }
1752 }
1753 
1754 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1755 static void glue(gen_, name)(DisasContext *ctx)                               \
1756 {                                                                             \
1757     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1758                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1759                       sign, compute_ov);                                      \
1760 }
1761 /* divdu  divdu.  divduo  divduo.   */
1762 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1763 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1764 /* divd  divd.  divdo  divdo.   */
1765 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1766 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1767 
1768 GEN_DIVE(divdeu, divdeu, 0);
1769 GEN_DIVE(divdeuo, divdeu, 1);
1770 GEN_DIVE(divde, divde, 0);
1771 GEN_DIVE(divdeo, divde, 1);
1772 #endif
1773 
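/*
 * Modulo shares the division guard: the divisor is forced to a safe
 * non-zero value whenever the remainder would trap on the host
 * (division by zero, or INT_MIN % -1 for the signed form).  Unlike
 * the divisions above, no OV or CR0 update is generated here.
 */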
1774 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1775                                      TCGv arg2, int sign)
1776 {
1777     TCGv_i32 t0 = tcg_temp_new_i32();
1778     TCGv_i32 t1 = tcg_temp_new_i32();
1779 
1780     tcg_gen_trunc_tl_i32(t0, arg1);
1781     tcg_gen_trunc_tl_i32(t1, arg2);
1782     if (sign) {
1783         TCGv_i32 t2 = tcg_temp_new_i32();
1784         TCGv_i32 t3 = tcg_temp_new_i32();
1785         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1786         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1787         tcg_gen_and_i32(t2, t2, t3);
1788         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1789         tcg_gen_or_i32(t2, t2, t3);
1790         tcg_gen_movi_i32(t3, 0);
1791         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1792         tcg_gen_rem_i32(t3, t0, t1);
1793         tcg_gen_ext_i32_tl(ret, t3);
1794     } else {
1795         TCGv_i32 t2 = tcg_constant_i32(1);
1796         TCGv_i32 t3 = tcg_constant_i32(0);
1797         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1798         tcg_gen_remu_i32(t0, t0, t1);
1799         tcg_gen_extu_i32_tl(ret, t0);
1800     }
1801 }
1802 
1803 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1804 static void glue(gen_, name)(DisasContext *ctx)                             \
1805 {                                                                           \
1806     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1807                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1808                       sign);                                                \
1809 }
1810 
1811 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1812 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1813 
1814 #if defined(TARGET_PPC64)
1815 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1816                                      TCGv arg2, int sign)
1817 {
1818     TCGv_i64 t0 = tcg_temp_new_i64();
1819     TCGv_i64 t1 = tcg_temp_new_i64();
1820 
1821     tcg_gen_mov_i64(t0, arg1);
1822     tcg_gen_mov_i64(t1, arg2);
1823     if (sign) {
1824         TCGv_i64 t2 = tcg_temp_new_i64();
1825         TCGv_i64 t3 = tcg_temp_new_i64();
1826         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1827         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1828         tcg_gen_and_i64(t2, t2, t3);
1829         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1830         tcg_gen_or_i64(t2, t2, t3);
1831         tcg_gen_movi_i64(t3, 0);
1832         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1833         tcg_gen_rem_i64(ret, t0, t1);
1834     } else {
1835         TCGv_i64 t2 = tcg_constant_i64(1);
1836         TCGv_i64 t3 = tcg_constant_i64(0);
1837         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1838         tcg_gen_remu_i64(ret, t0, t1);
1839     }
1840 }
1841 
1842 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1843 static void glue(gen_, name)(DisasContext *ctx)                             \
1844 {                                                                           \
1845     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1846                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1847                       sign);                                                \
1848 }
1849 
1850 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1851 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1852 #endif
1853 
1854 /* mulhw  mulhw. */
1855 static void gen_mulhw(DisasContext *ctx)
1856 {
1857     TCGv_i32 t0 = tcg_temp_new_i32();
1858     TCGv_i32 t1 = tcg_temp_new_i32();
1859 
1860     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1861     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1862     tcg_gen_muls2_i32(t0, t1, t0, t1);
1863     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1864     if (unlikely(Rc(ctx->opcode) != 0)) {
1865         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1866     }
1867 }
1868 
1869 /* mulhwu  mulhwu.  */
1870 static void gen_mulhwu(DisasContext *ctx)
1871 {
1872     TCGv_i32 t0 = tcg_temp_new_i32();
1873     TCGv_i32 t1 = tcg_temp_new_i32();
1874 
1875     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1876     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1877     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1878     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1879     if (unlikely(Rc(ctx->opcode) != 0)) {
1880         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1881     }
1882 }
1883 
1884 /* mullw  mullw. */
1885 static void gen_mullw(DisasContext *ctx)
1886 {
1887 #if defined(TARGET_PPC64)
1888     TCGv_i64 t0, t1;
1889     t0 = tcg_temp_new_i64();
1890     t1 = tcg_temp_new_i64();
1891     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1892     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1893     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1894 #else
1895     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1896                     cpu_gpr[rB(ctx->opcode)]);
1897 #endif
1898     if (unlikely(Rc(ctx->opcode) != 0)) {
1899         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1900     }
1901 }
1902 
1903 /* mullwo  mullwo. */
1904 static void gen_mullwo(DisasContext *ctx)
1905 {
1906     TCGv_i32 t0 = tcg_temp_new_i32();
1907     TCGv_i32 t1 = tcg_temp_new_i32();
1908 
1909     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1910     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1911     tcg_gen_muls2_i32(t0, t1, t0, t1);
1912 #if defined(TARGET_PPC64)
1913     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1914 #else
1915     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
1916 #endif
1917 
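    /*
     * Overflow iff the 64-bit product's high half differs from the
     * sign extension of its low half.
     */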
1918     tcg_gen_sari_i32(t0, t0, 31);
1919     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1920     tcg_gen_extu_i32_tl(cpu_ov, t0);
1921     if (is_isa300(ctx)) {
1922         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1923     }
1924     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1925 
1926     if (unlikely(Rc(ctx->opcode) != 0)) {
1927         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1928     }
1929 }
1930 
1931 /* mulli */
1932 static void gen_mulli(DisasContext *ctx)
1933 {
1934     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1935                     SIMM(ctx->opcode));
1936 }
1937 
1938 #if defined(TARGET_PPC64)
1939 /* mulhd  mulhd. */
1940 static void gen_mulhd(DisasContext *ctx)
1941 {
1942     TCGv lo = tcg_temp_new();
1943     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1944                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1945     if (unlikely(Rc(ctx->opcode) != 0)) {
1946         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1947     }
1948 }
1949 
1950 /* mulhdu  mulhdu. */
1951 static void gen_mulhdu(DisasContext *ctx)
1952 {
1953     TCGv lo = tcg_temp_new();
1954     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1955                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1956     if (unlikely(Rc(ctx->opcode) != 0)) {
1957         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1958     }
1959 }
1960 
1961 /* mulld  mulld. */
1962 static void gen_mulld(DisasContext *ctx)
1963 {
1964     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1965                    cpu_gpr[rB(ctx->opcode)]);
1966     if (unlikely(Rc(ctx->opcode) != 0)) {
1967         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1968     }
1969 }
1970 
1971 /* mulldo  mulldo. */
1972 static void gen_mulldo(DisasContext *ctx)
1973 {
1974     TCGv_i64 t0 = tcg_temp_new_i64();
1975     TCGv_i64 t1 = tcg_temp_new_i64();
1976 
1977     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
1978                       cpu_gpr[rB(ctx->opcode)]);
1979     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
1980 
1981     tcg_gen_sari_i64(t0, t0, 63);
1982     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
1983     if (is_isa300(ctx)) {
1984         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1985     }
1986     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1987 
1988     if (unlikely(Rc(ctx->opcode) != 0)) {
1989         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1990     }
1991 }
1992 #endif
1993 
1994 /* Common subf function */
1995 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1996                                      TCGv arg2, bool add_ca, bool compute_ca,
1997                                      bool compute_ov, bool compute_rc0)
1998 {
1999     TCGv t0 = ret;
2000 
2001     if (compute_ca || compute_ov) {
2002         t0 = tcg_temp_new();
2003     }
2004 
2005     if (compute_ca) {
2006         /* dest = ~arg1 + arg2 [+ ca].  */
2007         if (NARROW_MODE(ctx)) {
2008             /*
2009              * Caution: a non-obvious corner case of the spec is that
2010              * we must produce the *entire* 64-bit addition, but
2011              * produce the carry into bit 32.
2012              */
2013             TCGv inv1 = tcg_temp_new();
2014             TCGv t1 = tcg_temp_new();
2015             tcg_gen_not_tl(inv1, arg1);
2016             if (add_ca) {
2017                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2018             } else {
2019                 tcg_gen_addi_tl(t0, arg2, 1);
2020             }
2021             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2022             tcg_gen_add_tl(t0, t0, inv1);
2023             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2024             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2025             if (is_isa300(ctx)) {
2026                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2027             }
2028         } else if (add_ca) {
2029             TCGv zero, inv1 = tcg_temp_new();
2030             tcg_gen_not_tl(inv1, arg1);
2031             zero = tcg_constant_tl(0);
2032             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2033             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2034             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2035         } else {
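            /*
             * No carry-in: ~arg1 + arg2 + 1 carries out of the top bit
             * exactly when arg2 >= arg1 (unsigned), so CA is a single
             * setcond and the result is a plain subtraction.
             */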
2036             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2037             tcg_gen_sub_tl(t0, arg2, arg1);
2038             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2039         }
2040     } else if (add_ca) {
2041         /*
2042          * Since we're ignoring carry-out, we can simplify the
2043          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2044          */
2045         tcg_gen_sub_tl(t0, arg2, arg1);
2046         tcg_gen_add_tl(t0, t0, cpu_ca);
2047         tcg_gen_subi_tl(t0, t0, 1);
2048     } else {
2049         tcg_gen_sub_tl(t0, arg2, arg1);
2050     }
2051 
2052     if (compute_ov) {
2053         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2054     }
2055     if (unlikely(compute_rc0)) {
2056         gen_set_Rc0(ctx, t0);
2057     }
2058 
2059     if (t0 != ret) {
2060         tcg_gen_mov_tl(ret, t0);
2061     }
2062 }
2063 /* Sub functions with two operands */
2064 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2065 static void glue(gen_, name)(DisasContext *ctx)                               \
2066 {                                                                             \
2067     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2068                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2069                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2070 }
2071 /* Sub functions with one operand and one immediate */
2072 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2073                                 add_ca, compute_ca, compute_ov)               \
2074 static void glue(gen_, name)(DisasContext *ctx)                               \
2075 {                                                                             \
2076     TCGv t0 = tcg_constant_tl(const_val);                                     \
2077     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2078                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2079                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2080 }
2081 /* subf  subf.  subfo  subfo. */
2082 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2083 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2084 /* subfc  subfc.  subfco  subfco. */
2085 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2086 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2087 /* subfe  subfe.  subfeo  subfeo. */
2088 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2089 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2090 /* subfme  subfme.  subfmeo  subfmeo.  */
2091 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2092 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2093 /* subfze  subfze.  subfzeo  subfzeo. */
2094 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2095 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2096 
2097 /* subfic */
2098 static void gen_subfic(DisasContext *ctx)
2099 {
2100     TCGv c = tcg_constant_tl(SIMM(ctx->opcode));
2101     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2102                       c, 0, 1, 0, 0);
2103 }
2104 
2105 /* neg neg. nego nego. */
2106 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2107 {
2108     TCGv zero = tcg_constant_tl(0);
2109     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2110                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2111 }
2112 
2113 static void gen_neg(DisasContext *ctx)
2114 {
2115     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2116     if (unlikely(Rc(ctx->opcode))) {
2117         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2118     }
2119 }
2120 
2121 static void gen_nego(DisasContext *ctx)
2122 {
2123     gen_op_arith_neg(ctx, 1);
2124 }
2125 
2126 /***                            Integer logical                            ***/
2127 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2128 static void glue(gen_, name)(DisasContext *ctx)                               \
2129 {                                                                             \
2130     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2131        cpu_gpr[rB(ctx->opcode)]);                                             \
2132     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2133         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2134 }
2135 
2136 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2137 static void glue(gen_, name)(DisasContext *ctx)                               \
2138 {                                                                             \
2139     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2140     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2141         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2142 }
2143 
2144 /* and & and. */
2145 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2146 /* andc & andc. */
2147 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2148 
2149 /* andi. */
2150 static void gen_andi_(DisasContext *ctx)
2151 {
2152     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2153                     UIMM(ctx->opcode));
2154     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2155 }
2156 
2157 /* andis. */
2158 static void gen_andis_(DisasContext *ctx)
2159 {
2160     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2161                     UIMM(ctx->opcode) << 16);
2162     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2163 }
2164 
2165 /* cntlzw */
2166 static void gen_cntlzw(DisasContext *ctx)
2167 {
2168     TCGv_i32 t = tcg_temp_new_i32();
2169 
2170     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
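    /*
     * The final argument of clzi/ctzi is the value returned for a
     * zero input, matching cntlzw/cnttzw's defined result of 32.
     */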
2171     tcg_gen_clzi_i32(t, t, 32);
2172     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2173 
2174     if (unlikely(Rc(ctx->opcode) != 0)) {
2175         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2176     }
2177 }
2178 
2179 /* cnttzw */
2180 static void gen_cnttzw(DisasContext *ctx)
2181 {
2182     TCGv_i32 t = tcg_temp_new_i32();
2183 
2184     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2185     tcg_gen_ctzi_i32(t, t, 32);
2186     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2187 
2188     if (unlikely(Rc(ctx->opcode) != 0)) {
2189         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2190     }
2191 }
2192 
2193 /* eqv & eqv. */
2194 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2195 /* extsb & extsb. */
2196 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2197 /* extsh & extsh. */
2198 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2199 /* nand & nand. */
2200 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2201 /* nor & nor. */
2202 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2203 
2204 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2205 static void gen_pause(DisasContext *ctx)
2206 {
2207     TCGv_i32 t0 = tcg_constant_i32(0);
2208     tcg_gen_st_i32(t0, cpu_env,
2209                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2210 
2211     /* Stop translation, this gives other CPUs a chance to run */
2212     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2213 }
2214 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2215 
2216 /* or & or. */
2217 static void gen_or(DisasContext *ctx)
2218 {
2219     int rs, ra, rb;
2220 
2221     rs = rS(ctx->opcode);
2222     ra = rA(ctx->opcode);
2223     rb = rB(ctx->opcode);
2224     /* Optimisation: use a plain move for the mr (rS == rB) case */
2225     if (rs != ra || rs != rb) {
2226         if (rs != rb) {
2227             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2228         } else {
2229             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2230         }
2231         if (unlikely(Rc(ctx->opcode) != 0)) {
2232             gen_set_Rc0(ctx, cpu_gpr[ra]);
2233         }
2234     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2235         gen_set_Rc0(ctx, cpu_gpr[rs]);
2236 #if defined(TARGET_PPC64)
2237     } else if (rs != 0) { /* 0 is nop */
2238         int prio = 0;
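        /*
         * "or rx,rx,rx" with rx != 0 is a thread-priority hint: map
         * the recognised encodings to the 3-bit priority value that is
         * deposited into PPR[PRI] (bits 52:50, hence the mask and the
         * shift by 50) further down.
         */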
2239 
2240         switch (rs) {
2241         case 1:
2242             /* Set process priority to low */
2243             prio = 2;
2244             break;
2245         case 6:
2246             /* Set process priority to medium-low */
2247             prio = 3;
2248             break;
2249         case 2:
2250             /* Set process priority to normal */
2251             prio = 4;
2252             break;
2253 #if !defined(CONFIG_USER_ONLY)
2254         case 31:
2255             if (!ctx->pr) {
2256                 /* Set process priority to very low */
2257                 prio = 1;
2258             }
2259             break;
2260         case 5:
2261             if (!ctx->pr) {
2262                 /* Set process priority to medium-high */
2263                 prio = 5;
2264             }
2265             break;
2266         case 3:
2267             if (!ctx->pr) {
2268                 /* Set process priority to high */
2269                 prio = 6;
2270             }
2271             break;
2272         case 7:
2273             if (ctx->hv && !ctx->pr) {
2274                 /* Set process priority to very high */
2275                 prio = 7;
2276             }
2277             break;
2278 #endif
2279         default:
2280             break;
2281         }
2282         if (prio) {
2283             TCGv t0 = tcg_temp_new();
2284             gen_load_spr(t0, SPR_PPR);
2285             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2286             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2287             gen_store_spr(SPR_PPR, t0);
2288         }
2289 #if !defined(CONFIG_USER_ONLY)
2290         /*
2291          * Pause out of TCG; otherwise spin loops with smt_low eat too
2292          * much CPU and the kernel hangs.  This applies to all
2293          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2294          * mdoio(29), mdoom(30), and all currently undefined.
2295          */
2296         gen_pause(ctx);
2297 #endif
2298 #endif
2299     }
2300 }
2301 /* orc & orc. */
2302 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2303 
2304 /* xor & xor. */
2305 static void gen_xor(DisasContext *ctx)
2306 {
2307     /* Optimisation for "set to zero" case */
2308     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2309         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2310                        cpu_gpr[rB(ctx->opcode)]);
2311     } else {
2312         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2313     }
2314     if (unlikely(Rc(ctx->opcode) != 0)) {
2315         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2316     }
2317 }
2318 
2319 /* ori */
2320 static void gen_ori(DisasContext *ctx)
2321 {
2322     target_ulong uimm = UIMM(ctx->opcode);
2323 
2324     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2325         return;
2326     }
2327     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2328 }
2329 
2330 /* oris */
2331 static void gen_oris(DisasContext *ctx)
2332 {
2333     target_ulong uimm = UIMM(ctx->opcode);
2334 
2335     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2336         /* NOP */
2337         return;
2338     }
2339     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2340                    uimm << 16);
2341 }
2342 
2343 /* xori */
2344 static void gen_xori(DisasContext *ctx)
2345 {
2346     target_ulong uimm = UIMM(ctx->opcode);
2347 
2348     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2349         /* NOP */
2350         return;
2351     }
2352     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2353 }
2354 
2355 /* xoris */
2356 static void gen_xoris(DisasContext *ctx)
2357 {
2358     target_ulong uimm = UIMM(ctx->opcode);
2359 
2360     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2361         /* NOP */
2362         return;
2363     }
2364     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2365                     uimm << 16);
2366 }
2367 
2368 /* popcntb: PowerPC 2.03 specification */
2369 static void gen_popcntb(DisasContext *ctx)
2370 {
2371     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2372 }
2373 
2374 static void gen_popcntw(DisasContext *ctx)
2375 {
2376 #if defined(TARGET_PPC64)
2377     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2378 #else
2379     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2380 #endif
2381 }
2382 
2383 #if defined(TARGET_PPC64)
2384 /* popcntd: PowerPC 2.06 specification */
2385 static void gen_popcntd(DisasContext *ctx)
2386 {
2387     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2388 }
2389 #endif
2390 
2391 /* prtyw: PowerPC 2.05 specification */
2392 static void gen_prtyw(DisasContext *ctx)
2393 {
2394     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2395     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2396     TCGv t0 = tcg_temp_new();
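    /*
     * XOR-fold each 32-bit word onto itself: after the two folds,
     * bit 0 (and, on 64-bit, bit 32) holds the XOR of that word's
     * bits 0, 8, 16 and 24, i.e. the parity of the least-significant
     * bits of its four bytes; the final AND keeps just those bits.
     */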
2397     tcg_gen_shri_tl(t0, rs, 16);
2398     tcg_gen_xor_tl(ra, rs, t0);
2399     tcg_gen_shri_tl(t0, ra, 8);
2400     tcg_gen_xor_tl(ra, ra, t0);
2401     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2402 }
2403 
2404 #if defined(TARGET_PPC64)
2405 /* prtyd: PowerPC 2.05 specification */
2406 static void gen_prtyd(DisasContext *ctx)
2407 {
2408     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2409     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2410     TCGv t0 = tcg_temp_new();
2411     tcg_gen_shri_tl(t0, rs, 32);
2412     tcg_gen_xor_tl(ra, rs, t0);
2413     tcg_gen_shri_tl(t0, ra, 16);
2414     tcg_gen_xor_tl(ra, ra, t0);
2415     tcg_gen_shri_tl(t0, ra, 8);
2416     tcg_gen_xor_tl(ra, ra, t0);
2417     tcg_gen_andi_tl(ra, ra, 1);
2418 }
2419 #endif
2420 
2421 #if defined(TARGET_PPC64)
2422 /* bpermd */
2423 static void gen_bpermd(DisasContext *ctx)
2424 {
2425     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2426                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2427 }
2428 #endif
2429 
2430 #if defined(TARGET_PPC64)
2431 /* extsw & extsw. */
2432 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2433 
2434 /* cntlzd */
2435 static void gen_cntlzd(DisasContext *ctx)
2436 {
2437     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2438     if (unlikely(Rc(ctx->opcode) != 0)) {
2439         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2440     }
2441 }
2442 
2443 /* cnttzd */
2444 static void gen_cnttzd(DisasContext *ctx)
2445 {
2446     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2447     if (unlikely(Rc(ctx->opcode) != 0)) {
2448         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2449     }
2450 }
2451 
2452 /* darn */
2453 static void gen_darn(DisasContext *ctx)
2454 {
2455     int l = L(ctx->opcode);
2456 
2457     if (l > 2) {
2458         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2459     } else {
2460         translator_io_start(&ctx->base);
2461         if (l == 0) {
2462             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2463         } else {
2464             /* Return 64-bit random for both CRN and RRN */
2465             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2466         }
2467     }
2468 }
2469 #endif
2470 
2471 /***                             Integer rotate                            ***/
2472 
2473 /* rlwimi & rlwimi. */
2474 static void gen_rlwimi(DisasContext *ctx)
2475 {
2476     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2477     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2478     uint32_t sh = SH(ctx->opcode);
2479     uint32_t mb = MB(ctx->opcode);
2480     uint32_t me = ME(ctx->opcode);
2481 
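    /*
     * Fast path: a contiguous mask (mb <= me) rotated so that it lands
     * exactly at its insertion point (sh == 31 - me) makes rlwimi a
     * plain bit-field deposit of me - mb + 1 bits at offset sh.
     */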
2482     if (sh == (31 - me) && mb <= me) {
2483         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2484     } else {
2485         target_ulong mask;
2486         bool mask_in_32b = true;
2487         TCGv t1;
2488 
2489 #if defined(TARGET_PPC64)
2490         mb += 32;
2491         me += 32;
2492 #endif
2493         mask = MASK(mb, me);
2494 
2495 #if defined(TARGET_PPC64)
2496         if (mask > 0xffffffffu) {
2497             mask_in_32b = false;
2498         }
2499 #endif
2500         t1 = tcg_temp_new();
2501         if (mask_in_32b) {
2502             TCGv_i32 t0 = tcg_temp_new_i32();
2503             tcg_gen_trunc_tl_i32(t0, t_rs);
2504             tcg_gen_rotli_i32(t0, t0, sh);
2505             tcg_gen_extu_i32_tl(t1, t0);
2506         } else {
2507 #if defined(TARGET_PPC64)
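            /*
             * A mask that does not fit in 32 bits is a wrap-around mask
             * (mb > me), which also selects bits in the upper half.
             * Duplicating the low word into the high half first makes
             * the 64-bit rotation deliver the 32-bit rotation result in
             * both halves, so the AND below picks up the right bits.
             */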
2508             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2509             tcg_gen_rotli_i64(t1, t1, sh);
2510 #else
2511             g_assert_not_reached();
2512 #endif
2513         }
2514 
2515         tcg_gen_andi_tl(t1, t1, mask);
2516         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2517         tcg_gen_or_tl(t_ra, t_ra, t1);
2518     }
2519     if (unlikely(Rc(ctx->opcode) != 0)) {
2520         gen_set_Rc0(ctx, t_ra);
2521     }
2522 }
2523 
2524 /* rlwinm & rlwinm. */
2525 static void gen_rlwinm(DisasContext *ctx)
2526 {
2527     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2528     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2529     int sh = SH(ctx->opcode);
2530     int mb = MB(ctx->opcode);
2531     int me = ME(ctx->opcode);
2532     int len = me - mb + 1;
2533     int rsh = (32 - sh) & 31;
2534 
2535     if (sh != 0 && len > 0 && me == (31 - sh)) {
2536         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2537     } else if (me == 31 && rsh + len <= 32) {
2538         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2539     } else {
2540         target_ulong mask;
2541         bool mask_in_32b = true;
2542 #if defined(TARGET_PPC64)
2543         mb += 32;
2544         me += 32;
2545 #endif
2546         mask = MASK(mb, me);
2547 #if defined(TARGET_PPC64)
2548         if (mask > 0xffffffffu) {
2549             mask_in_32b = false;
2550         }
2551 #endif
2552         if (mask_in_32b) {
2553             if (sh == 0) {
2554                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2555             } else {
2556                 TCGv_i32 t0 = tcg_temp_new_i32();
2557                 tcg_gen_trunc_tl_i32(t0, t_rs);
2558                 tcg_gen_rotli_i32(t0, t0, sh);
2559                 tcg_gen_andi_i32(t0, t0, mask);
2560                 tcg_gen_extu_i32_tl(t_ra, t0);
2561             }
2562         } else {
2563 #if defined(TARGET_PPC64)
2564             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2565             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2566             tcg_gen_andi_i64(t_ra, t_ra, mask);
2567 #else
2568             g_assert_not_reached();
2569 #endif
2570         }
2571     }
2572     if (unlikely(Rc(ctx->opcode) != 0)) {
2573         gen_set_Rc0(ctx, t_ra);
2574     }
2575 }
2576 
2577 /* rlwnm & rlwnm. */
2578 static void gen_rlwnm(DisasContext *ctx)
2579 {
2580     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2581     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2582     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2583     uint32_t mb = MB(ctx->opcode);
2584     uint32_t me = ME(ctx->opcode);
2585     target_ulong mask;
2586     bool mask_in_32b = true;
2587 
2588 #if defined(TARGET_PPC64)
2589     mb += 32;
2590     me += 32;
2591 #endif
2592     mask = MASK(mb, me);
2593 
2594 #if defined(TARGET_PPC64)
2595     if (mask > 0xffffffffu) {
2596         mask_in_32b = false;
2597     }
2598 #endif
2599     if (mask_in_32b) {
2600         TCGv_i32 t0 = tcg_temp_new_i32();
2601         TCGv_i32 t1 = tcg_temp_new_i32();
2602         tcg_gen_trunc_tl_i32(t0, t_rb);
2603         tcg_gen_trunc_tl_i32(t1, t_rs);
2604         tcg_gen_andi_i32(t0, t0, 0x1f);
2605         tcg_gen_rotl_i32(t1, t1, t0);
2606         tcg_gen_extu_i32_tl(t_ra, t1);
2607     } else {
2608 #if defined(TARGET_PPC64)
2609         TCGv_i64 t0 = tcg_temp_new_i64();
2610         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2611         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2612         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2613 #else
2614         g_assert_not_reached();
2615 #endif
2616     }
2617 
2618     tcg_gen_andi_tl(t_ra, t_ra, mask);
2619 
2620     if (unlikely(Rc(ctx->opcode) != 0)) {
2621         gen_set_Rc0(ctx, t_ra);
2622     }
2623 }
2624 
2625 #if defined(TARGET_PPC64)
2626 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2627 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2628 {                                                                             \
2629     gen_##name(ctx, 0);                                                       \
2630 }                                                                             \
2631                                                                               \
2632 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2633 {                                                                             \
2634     gen_##name(ctx, 1);                                                       \
2635 }
2636 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2637 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2638 {                                                                             \
2639     gen_##name(ctx, 0, 0);                                                    \
2640 }                                                                             \
2641                                                                               \
2642 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2643 {                                                                             \
2644     gen_##name(ctx, 0, 1);                                                    \
2645 }                                                                             \
2646                                                                               \
2647 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2648 {                                                                             \
2649     gen_##name(ctx, 1, 0);                                                    \
2650 }                                                                             \
2651                                                                               \
2652 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2653 {                                                                             \
2654     gen_##name(ctx, 1, 1);                                                    \
2655 }
2656 
2657 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2658 {
2659     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2660     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2661     int len = me - mb + 1;
2662     int rsh = (64 - sh) & 63;
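    /*
     * Fast paths mirror gen_rlwinm: when me == 63 - sh the operation
     * is a zero-extending deposit of the low len bits at offset sh;
     * when me == 63 and the field fits, it is an extract of len bits
     * starting at rsh; otherwise fall back to rotate-and-mask.
     */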
2663 
2664     if (sh != 0 && len > 0 && me == (63 - sh)) {
2665         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2666     } else if (me == 63 && rsh + len <= 64) {
2667         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2668     } else {
2669         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2670         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2671     }
2672     if (unlikely(Rc(ctx->opcode) != 0)) {
2673         gen_set_Rc0(ctx, t_ra);
2674     }
2675 }
2676 
2677 /* rldicl - rldicl. */
2678 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2679 {
2680     uint32_t sh, mb;
2681 
2682     sh = SH(ctx->opcode) | (shn << 5);
2683     mb = MB(ctx->opcode) | (mbn << 5);
2684     gen_rldinm(ctx, mb, 63, sh);
2685 }
2686 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2687 
2688 /* rldicr - rldicr. */
2689 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2690 {
2691     uint32_t sh, me;
2692 
2693     sh = SH(ctx->opcode) | (shn << 5);
2694     me = MB(ctx->opcode) | (men << 5);
2695     gen_rldinm(ctx, 0, me, sh);
2696 }
2697 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2698 
2699 /* rldic - rldic. */
2700 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2701 {
2702     uint32_t sh, mb;
2703 
2704     sh = SH(ctx->opcode) | (shn << 5);
2705     mb = MB(ctx->opcode) | (mbn << 5);
2706     gen_rldinm(ctx, mb, 63 - sh, sh);
2707 }
2708 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2709 
2710 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2711 {
2712     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2713     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2714     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2715     TCGv t0;
2716 
2717     t0 = tcg_temp_new();
2718     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2719     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2720 
2721     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2722     if (unlikely(Rc(ctx->opcode) != 0)) {
2723         gen_set_Rc0(ctx, t_ra);
2724     }
2725 }
2726 
2727 /* rldcl - rldcl. */
2728 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2729 {
2730     uint32_t mb;
2731 
2732     mb = MB(ctx->opcode) | (mbn << 5);
2733     gen_rldnm(ctx, mb, 63);
2734 }
2735 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2736 
2737 /* rldcr - rldcr. */
2738 static inline void gen_rldcr(DisasContext *ctx, int men)
2739 {
2740     uint32_t me;
2741 
2742     me = MB(ctx->opcode) | (men << 5);
2743     gen_rldnm(ctx, 0, me);
2744 }
2745 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2746 
2747 /* rldimi - rldimi. */
2748 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2749 {
2750     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2751     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2752     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2753     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2754     uint32_t me = 63 - sh;
2755 
2756     if (mb <= me) {
2757         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2758     } else {
2759         target_ulong mask = MASK(mb, me);
2760         TCGv t1 = tcg_temp_new();
2761 
2762         tcg_gen_rotli_tl(t1, t_rs, sh);
2763         tcg_gen_andi_tl(t1, t1, mask);
2764         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2765         tcg_gen_or_tl(t_ra, t_ra, t1);
2766     }
2767     if (unlikely(Rc(ctx->opcode) != 0)) {
2768         gen_set_Rc0(ctx, t_ra);
2769     }
2770 }
2771 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2772 #endif
2773 
2774 /***                             Integer shift                             ***/
2775 
2776 /* slw & slw. */
2777 static void gen_slw(DisasContext *ctx)
2778 {
2779     TCGv t0, t1;
2780 
2781     t0 = tcg_temp_new();
2782     /* AND rS with a mask that is 0 when rB >= 0x20 */
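    /*
     * Bit 0x20 of rB is shifted up to the sign bit and smeared back
     * down, giving an all-ones mask exactly when the shift amount is
     * >= 32; the andc then zeroes rS so the result is 0, as the
     * architecture specifies for such shifts.
     */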
2783 #if defined(TARGET_PPC64)
2784     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2785     tcg_gen_sari_tl(t0, t0, 0x3f);
2786 #else
2787     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2788     tcg_gen_sari_tl(t0, t0, 0x1f);
2789 #endif
2790     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2791     t1 = tcg_temp_new();
2792     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2793     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2794     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2795     if (unlikely(Rc(ctx->opcode) != 0)) {
2796         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2797     }
2798 }
2799 
2800 /* sraw & sraw. */
2801 static void gen_sraw(DisasContext *ctx)
2802 {
2803     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2804                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2805     if (unlikely(Rc(ctx->opcode) != 0)) {
2806         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2807     }
2808 }
2809 
2810 /* srawi & srawi. */
2811 static void gen_srawi(DisasContext *ctx)
2812 {
2813     int sh = SH(ctx->opcode);
2814     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2815     TCGv src = cpu_gpr[rS(ctx->opcode)];
2816     if (sh == 0) {
2817         tcg_gen_ext32s_tl(dst, src);
2818         tcg_gen_movi_tl(cpu_ca, 0);
2819         if (is_isa300(ctx)) {
2820             tcg_gen_movi_tl(cpu_ca32, 0);
2821         }
2822     } else {
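        /*
         * CA is set only when the source is negative and non-zero bits
         * are shifted out, i.e. when the arithmetic shift is inexact:
         * collect the shifted-out bits, AND them with the sign smear,
         * and reduce to 0/1.
         */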
2823         TCGv t0;
2824         tcg_gen_ext32s_tl(dst, src);
2825         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2826         t0 = tcg_temp_new();
2827         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2828         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2829         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2830         if (is_isa300(ctx)) {
2831             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2832         }
2833         tcg_gen_sari_tl(dst, dst, sh);
2834     }
2835     if (unlikely(Rc(ctx->opcode) != 0)) {
2836         gen_set_Rc0(ctx, dst);
2837     }
2838 }
2839 
2840 /* srw & srw. */
2841 static void gen_srw(DisasContext *ctx)
2842 {
2843     TCGv t0, t1;
2844 
2845     t0 = tcg_temp_new();
2846     /* AND rS with a mask that is 0 when rB >= 0x20 */
2847 #if defined(TARGET_PPC64)
2848     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2849     tcg_gen_sari_tl(t0, t0, 0x3f);
2850 #else
2851     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2852     tcg_gen_sari_tl(t0, t0, 0x1f);
2853 #endif
2854     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2855     tcg_gen_ext32u_tl(t0, t0);
2856     t1 = tcg_temp_new();
2857     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2858     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2859     if (unlikely(Rc(ctx->opcode) != 0)) {
2860         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2861     }
2862 }
2863 
2864 #if defined(TARGET_PPC64)
2865 /* sld & sld. */
2866 static void gen_sld(DisasContext *ctx)
2867 {
2868     TCGv t0, t1;
2869 
2870     t0 = tcg_temp_new();
2871     /* AND rS with a mask that is 0 when rB >= 0x40 */
2872     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2873     tcg_gen_sari_tl(t0, t0, 0x3f);
2874     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2875     t1 = tcg_temp_new();
2876     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2877     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2878     if (unlikely(Rc(ctx->opcode) != 0)) {
2879         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2880     }
2881 }
2882 
2883 /* srad & srad. */
2884 static void gen_srad(DisasContext *ctx)
2885 {
2886     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
2887                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2888     if (unlikely(Rc(ctx->opcode) != 0)) {
2889         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2890     }
2891 }
2892 /* sradi & sradi. */
2893 static inline void gen_sradi(DisasContext *ctx, int n)
2894 {
2895     int sh = SH(ctx->opcode) + (n << 5);
2896     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2897     TCGv src = cpu_gpr[rS(ctx->opcode)];
2898     if (sh == 0) {
2899         tcg_gen_mov_tl(dst, src);
2900         tcg_gen_movi_tl(cpu_ca, 0);
2901         if (is_isa300(ctx)) {
2902             tcg_gen_movi_tl(cpu_ca32, 0);
2903         }
2904     } else {
2905         TCGv t0;
2906         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2907         t0 = tcg_temp_new();
2908         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2909         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2910         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2911         if (is_isa300(ctx)) {
2912             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2913         }
2914         tcg_gen_sari_tl(dst, src, sh);
2915     }
2916     if (unlikely(Rc(ctx->opcode) != 0)) {
2917         gen_set_Rc0(ctx, dst);
2918     }
2919 }
2920 
2921 static void gen_sradi0(DisasContext *ctx)
2922 {
2923     gen_sradi(ctx, 0);
2924 }
2925 
2926 static void gen_sradi1(DisasContext *ctx)
2927 {
2928     gen_sradi(ctx, 1);
2929 }
2930 
2931 /* extswsli & extswsli. */
2932 static inline void gen_extswsli(DisasContext *ctx, int n)
2933 {
2934     int sh = SH(ctx->opcode) + (n << 5);
2935     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2936     TCGv src = cpu_gpr[rS(ctx->opcode)];
2937 
2938     tcg_gen_ext32s_tl(dst, src);
2939     tcg_gen_shli_tl(dst, dst, sh);
2940     if (unlikely(Rc(ctx->opcode) != 0)) {
2941         gen_set_Rc0(ctx, dst);
2942     }
2943 }
2944 
2945 static void gen_extswsli0(DisasContext *ctx)
2946 {
2947     gen_extswsli(ctx, 0);
2948 }
2949 
2950 static void gen_extswsli1(DisasContext *ctx)
2951 {
2952     gen_extswsli(ctx, 1);
2953 }
2954 
2955 /* srd & srd. */
2956 static void gen_srd(DisasContext *ctx)
2957 {
2958     TCGv t0, t1;
2959 
2960     t0 = tcg_temp_new();
2961     /* AND rS with a mask that is 0 when rB >= 0x40 */
2962     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2963     tcg_gen_sari_tl(t0, t0, 0x3f);
2964     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2965     t1 = tcg_temp_new();
2966     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2967     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2968     if (unlikely(Rc(ctx->opcode) != 0)) {
2969         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2970     }
2971 }
2972 #endif
2973 
2974 /***                           Addressing modes                            ***/
2975 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2976 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2977                                       target_long maskl)
2978 {
2979     target_long simm = SIMM(ctx->opcode);
2980 
2981     simm &= ~maskl;
2982     if (rA(ctx->opcode) == 0) {
2983         if (NARROW_MODE(ctx)) {
2984             simm = (uint32_t)simm;
2985         }
2986         tcg_gen_movi_tl(EA, simm);
2987     } else if (likely(simm != 0)) {
2988         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2989         if (NARROW_MODE(ctx)) {
2990             tcg_gen_ext32u_tl(EA, EA);
2991         }
2992     } else {
2993         if (NARROW_MODE(ctx)) {
2994             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2995         } else {
2996             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2997         }
2998     }
2999 }
3000 
3001 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3002 {
3003     if (rA(ctx->opcode) == 0) {
3004         if (NARROW_MODE(ctx)) {
3005             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3006         } else {
3007             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3008         }
3009     } else {
3010         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3011         if (NARROW_MODE(ctx)) {
3012             tcg_gen_ext32u_tl(EA, EA);
3013         }
3014     }
3015 }
3016 
3017 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3018 {
3019     if (rA(ctx->opcode) == 0) {
3020         tcg_gen_movi_tl(EA, 0);
3021     } else if (NARROW_MODE(ctx)) {
3022         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3023     } else {
3024         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3025     }
3026 }
3027 
3028 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3029                                 target_long val)
3030 {
3031     tcg_gen_addi_tl(ret, arg1, val);
3032     if (NARROW_MODE(ctx)) {
3033         tcg_gen_ext32u_tl(ret, ret);
3034     }
3035 }
3036 
3037 static inline void gen_align_no_le(DisasContext *ctx)
3038 {
3039     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3040                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3041 }
3042 
3043 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3044 {
3045     TCGv ea = tcg_temp_new();
3046     if (ra) {
3047         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3048     } else {
3049         tcg_gen_mov_tl(ea, displ);
3050     }
3051     if (NARROW_MODE(ctx)) {
3052         tcg_gen_ext32u_tl(ea, ea);
3053     }
3054     return ea;
3055 }
3056 
3057 /***                             Integer load                              ***/
3058 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3059 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
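/*
 * DEF_MEMOP applies the context's default byte order to a memory op;
 * BSWAP_MEMOP additionally flips MO_BSWAP, so the byte-reversed
 * load/store variants below access memory with the opposite of the
 * current default byte order.
 */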
3060 
3061 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3062 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3063                                   TCGv val,                             \
3064                                   TCGv addr)                            \
3065 {                                                                       \
3066     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3067 }
3068 
3069 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3070 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3071 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3072 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3073 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3074 
3075 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3076 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3077 
3078 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3079 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3080                                              TCGv_i64 val,          \
3081                                              TCGv addr)             \
3082 {                                                                   \
3083     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3084 }
3085 
3086 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3087 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3088 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3089 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3090 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3091 
3092 #if defined(TARGET_PPC64)
3093 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3094 #endif
3095 
3096 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3097 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3098                                   TCGv val,                             \
3099                                   TCGv addr)                            \
3100 {                                                                       \
3101     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3102 }
3103 
3104 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3105 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3106 #endif
3107 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3108 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3109 
3110 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3111 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3112 
3113 #define GEN_QEMU_STORE_64(stop, op)                               \
3114 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3115                                               TCGv_i64 val,       \
3116                                               TCGv addr)          \
3117 {                                                                 \
3118     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3119 }
3120 
3121 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3122 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3123 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3124 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3125 
3126 #if defined(TARGET_PPC64)
3127 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3128 #endif
3129 
3130 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3131 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3132 {                                                                             \
3133     TCGv EA;                                                                  \
3134     chk(ctx);                                                                 \
3135     gen_set_access_type(ctx, ACCESS_INT);                                     \
3136     EA = tcg_temp_new();                                                      \
3137     gen_addr_reg_index(ctx, EA);                                              \
3138     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3139 }
3140 
3141 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3142     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3143 
3144 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3145     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
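/*
 * For illustration, GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER), used
 * further below, expands to roughly:
 *
 *     static void gen_lhbrx(DisasContext *ctx)
 *     {
 *         TCGv EA;
 *         CHK_NONE(ctx);
 *         gen_set_access_type(ctx, ACCESS_INT);
 *         EA = tcg_temp_new();
 *         gen_addr_reg_index(ctx, EA);
 *         gen_qemu_ld16ur(ctx, cpu_gpr[rD(ctx->opcode)], EA);
 *     }
 */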
3146 
3147 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3148 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3149 {                                                                             \
3150     TCGv EA;                                                                  \
3151     CHK_SV(ctx);                                                              \
3152     gen_set_access_type(ctx, ACCESS_INT);                                     \
3153     EA = tcg_temp_new();                                                      \
3154     gen_addr_reg_index(ctx, EA);                                              \
3155     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3156 }
3157 
3158 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3159 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3160 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3161 #if defined(TARGET_PPC64)
3162 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3163 #endif
3164 
3165 #if defined(TARGET_PPC64)
3166 /* CI load/store variants */
3167 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3168 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3169 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3170 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3171 #endif
3172 
3173 /***                              Integer store                            ***/
3174 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3175 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3176 {                                                                             \
3177     TCGv EA;                                                                  \
3178     chk(ctx);                                                                 \
3179     gen_set_access_type(ctx, ACCESS_INT);                                     \
3180     EA = tcg_temp_new();                                                      \
3181     gen_addr_reg_index(ctx, EA);                                              \
3182     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3183 }
3184 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3185     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3186 
3187 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3188     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3189 
3190 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3191 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3192 {                                                                             \
3193     TCGv EA;                                                                  \
3194     CHK_SV(ctx);                                                              \
3195     gen_set_access_type(ctx, ACCESS_INT);                                     \
3196     EA = tcg_temp_new();                                                      \
3197     gen_addr_reg_index(ctx, EA);                                              \
3198     tcg_gen_qemu_st_tl(                                                       \
3199         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3200 }
3201 
3202 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3203 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3204 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3205 #if defined(TARGET_PPC64)
3206 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3207 #endif
3208 
3209 #if defined(TARGET_PPC64)
3210 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3211 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3212 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3213 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3214 #endif
3215 /***                Integer load and store with byte reverse               ***/
3216 
3217 /* lhbrx */
3218 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3219 
3220 /* lwbrx */
3221 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3222 
3223 #if defined(TARGET_PPC64)
3224 /* ldbrx */
3225 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3226 /* stdbrx */
3227 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3228 #endif  /* TARGET_PPC64 */
3229 
3230 /* sthbrx */
3231 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3232 /* stwbrx */
3233 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3234 
3235 /***                    Integer load and store multiple                    ***/
3236 
3237 /* lmw */
3238 static void gen_lmw(DisasContext *ctx)
3239 {
3240     TCGv t0;
3241     TCGv_i32 t1;
3242 
3243     if (ctx->le_mode) {
3244         gen_align_no_le(ctx);
3245         return;
3246     }
3247     gen_set_access_type(ctx, ACCESS_INT);
3248     t0 = tcg_temp_new();
3249     t1 = tcg_constant_i32(rD(ctx->opcode));
3250     gen_addr_imm_index(ctx, t0, 0);
3251     gen_helper_lmw(cpu_env, t0, t1);
3252 }
3253 
3254 /* stmw */
3255 static void gen_stmw(DisasContext *ctx)
3256 {
3257     TCGv t0;
3258     TCGv_i32 t1;
3259 
3260     if (ctx->le_mode) {
3261         gen_align_no_le(ctx);
3262         return;
3263     }
3264     gen_set_access_type(ctx, ACCESS_INT);
3265     t0 = tcg_temp_new();
3266     t1 = tcg_constant_i32(rS(ctx->opcode));
3267     gen_addr_imm_index(ctx, t0, 0);
3268     gen_helper_stmw(cpu_env, t0, t1);
3269 }
3270 
3271 /***                    Integer load and store strings                     ***/
3272 
3273 /* lswi */
3274 /*
3275  * PowerPC32 specification says we must generate an exception if rA is
3276  * in the range of registers to be loaded.  On the other hand, IBM says
3277  * this is valid, but rA won't be loaded.  For now, I'll follow the
3278  * spec...
3279  */
3280 static void gen_lswi(DisasContext *ctx)
3281 {
3282     TCGv t0;
3283     TCGv_i32 t1, t2;
3284     int nb = NB(ctx->opcode);
3285     int start = rD(ctx->opcode);
3286     int ra = rA(ctx->opcode);
3287     int nr;
3288 
3289     if (ctx->le_mode) {
3290         gen_align_no_le(ctx);
3291         return;
3292     }
3293     if (nb == 0) {
3294         nb = 32;
3295     }
3296     nr = DIV_ROUND_UP(nb, 4);
3297     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3298         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3299         return;
3300     }
3301     gen_set_access_type(ctx, ACCESS_INT);
3302     t0 = tcg_temp_new();
3303     gen_addr_register(ctx, t0);
3304     t1 = tcg_constant_i32(nb);
3305     t2 = tcg_constant_i32(start);
3306     gen_helper_lsw(cpu_env, t0, t1, t2);
3307 }
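/*
 * Worked example (illustrative): for "lswi r5, r4, 7", nb = 7 and
 * nr = DIV_ROUND_UP(7, 4) = 2, so the helper loads 7 bytes starting at the
 * address in r4 into r5 and r6, zero-padding the unfilled bytes of the last
 * register.  "lswi r5, r6, 12" instead raises the invalid exception above,
 * because rA = r6 falls inside the loaded range r5..r7.  With NB = 0 in the
 * opcode, nb becomes 32 and eight registers are loaded.
 */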
3308 
3309 /* lswx */
3310 static void gen_lswx(DisasContext *ctx)
3311 {
3312     TCGv t0;
3313     TCGv_i32 t1, t2, t3;
3314 
3315     if (ctx->le_mode) {
3316         gen_align_no_le(ctx);
3317         return;
3318     }
3319     gen_set_access_type(ctx, ACCESS_INT);
3320     t0 = tcg_temp_new();
3321     gen_addr_reg_index(ctx, t0);
3322     t1 = tcg_constant_i32(rD(ctx->opcode));
3323     t2 = tcg_constant_i32(rA(ctx->opcode));
3324     t3 = tcg_constant_i32(rB(ctx->opcode));
3325     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3326 }
3327 
3328 /* stswi */
3329 static void gen_stswi(DisasContext *ctx)
3330 {
3331     TCGv t0;
3332     TCGv_i32 t1, t2;
3333     int nb = NB(ctx->opcode);
3334 
3335     if (ctx->le_mode) {
3336         gen_align_no_le(ctx);
3337         return;
3338     }
3339     gen_set_access_type(ctx, ACCESS_INT);
3340     t0 = tcg_temp_new();
3341     gen_addr_register(ctx, t0);
3342     if (nb == 0) {
3343         nb = 32;
3344     }
3345     t1 = tcg_constant_i32(nb);
3346     t2 = tcg_constant_i32(rS(ctx->opcode));
3347     gen_helper_stsw(cpu_env, t0, t1, t2);
3348 }
3349 
3350 /* stswx */
3351 static void gen_stswx(DisasContext *ctx)
3352 {
3353     TCGv t0;
3354     TCGv_i32 t1, t2;
3355 
3356     if (ctx->le_mode) {
3357         gen_align_no_le(ctx);
3358         return;
3359     }
3360     gen_set_access_type(ctx, ACCESS_INT);
3361     t0 = tcg_temp_new();
3362     gen_addr_reg_index(ctx, t0);
3363     t1 = tcg_temp_new_i32();
3364     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3365     tcg_gen_andi_i32(t1, t1, 0x7F);
3366     t2 = tcg_constant_i32(rS(ctx->opcode));
3367     gen_helper_stsw(cpu_env, t0, t1, t2);
3368 }
3369 
3370 /***                        Memory synchronisation                         ***/
3371 /* eieio */
3372 static void gen_eieio(DisasContext *ctx)
3373 {
3374     TCGBar bar = TCG_MO_ALL;
3375 
3376     /*
3377      * eieio has complex semantics. It provides memory ordering between
3378      * operations in the set:
3379      * - loads from CI memory.
3380      * - stores to CI memory.
3381      * - stores to WT memory.
3382      *
3383      * It separately also orders memory for operations in the set:
3384      * - stores to cacheable memory.
3385      *
3386      * It also serializes instructions:
3387      * - dcbt and dcbst.
3388      *
3389      * It separately serializes:
3390      * - tlbie and tlbsync.
3391      *
3392      * And separately serializes:
3393      * - slbieg, slbiag, and slbsync.
3394      *
3395      * The end result is that CI memory ordering requires TCG_MO_ALL
3396      * and it is not possible to special-case more relaxed ordering for
3397      * cacheable accesses. TCG_BAR_SC is required to provide this
3398      * serialization.
3399      */
3400 
3401     /*
3402      * POWER9 has a eieio instruction variant using bit 6 as a hint to
3403      * tell the CPU it is a store-forwarding barrier.
3404      */
3405     if (ctx->opcode & 0x2000000) {
3406         /*
3407          * ISA says that "Reserved fields in instructions are ignored
3408          * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3409          * since this is not an instruction software should be using,
3410          * complain to the user.
3411          */
3412         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3413             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3414                           TARGET_FMT_lx "\n", ctx->cia);
3415         } else {
3416             bar = TCG_MO_ST_LD;
3417         }
3418     }
3419 
3420     tcg_gen_mb(bar | TCG_BAR_SC);
3421 }
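/*
 * For reference, the 0x2000000 mask above tests bit 25 of the opcode in
 * LSB-0 numbering, which is bit 6 in the ISA's MSB-0 instruction numbering
 * (31 - 25 = 6), i.e. the store-forwarding hint bit of the POWER9 eieio
 * variant described in the comments above.
 */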
3422 
3423 #if !defined(CONFIG_USER_ONLY)
3424 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3425 {
3426     TCGv_i32 t;
3427     TCGLabel *l;
3428 
3429     if (!ctx->lazy_tlb_flush) {
3430         return;
3431     }
3432     l = gen_new_label();
3433     t = tcg_temp_new_i32();
3434     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3435     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3436     if (global) {
3437         gen_helper_check_tlb_flush_global(cpu_env);
3438     } else {
3439         gen_helper_check_tlb_flush_local(cpu_env);
3440     }
3441     gen_set_label(l);
3442 }
3443 #else
3444 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3445 #endif
3446 
3447 /* isync */
3448 static void gen_isync(DisasContext *ctx)
3449 {
3450     /*
3451      * We need to check for a pending TLB flush. This can only happen in
3452      * kernel mode, however, so check MSR_PR.
3453      */
3454     if (!ctx->pr) {
3455         gen_check_tlb_flush(ctx, false);
3456     }
3457     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3458     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3459 }
3460 
3461 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
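/*
 * For illustration, MO_SIZE encodes log2 of the access size, so
 * MEMOP_GET_SIZE(MO_UB) == 1, MEMOP_GET_SIZE(MO_UW) == 2,
 * MEMOP_GET_SIZE(MO_UL) == 4 and MEMOP_GET_SIZE(MO_UQ) == 8.
 */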
3462 
3463 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3464 {
3465     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3466     TCGv t0 = tcg_temp_new();
3467 
3468     gen_set_access_type(ctx, ACCESS_RES);
3469     gen_addr_reg_index(ctx, t0);
3470     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3471     tcg_gen_mov_tl(cpu_reserve, t0);
3472     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3473     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3474 }
3475 
3476 #define LARX(name, memop)                  \
3477 static void gen_##name(DisasContext *ctx)  \
3478 {                                          \
3479     gen_load_locked(ctx, memop);           \
3480 }
3481 
3482 /* lbarx, lharx, lwarx */
3483 LARX(lbarx, DEF_MEMOP(MO_UB))
3484 LARX(lharx, DEF_MEMOP(MO_UW))
3485 LARX(lwarx, DEF_MEMOP(MO_UL))
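/*
 * For illustration, LARX(lwarx, DEF_MEMOP(MO_UL)) expands to roughly:
 *
 *     static void gen_lwarx(DisasContext *ctx)
 *     {
 *         gen_load_locked(ctx, DEF_MEMOP(MO_UL));
 *     }
 *
 * gen_load_locked() latches the reservation address and the loaded value in
 * cpu_reserve / cpu_reserve_val, which the matching store-conditional
 * (gen_conditional_store below) compares against.
 */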
3486 
3487 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3488                                       TCGv EA, TCGCond cond, int addend)
3489 {
3490     TCGv t = tcg_temp_new();
3491     TCGv t2 = tcg_temp_new();
3492     TCGv u = tcg_temp_new();
3493 
3494     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3495     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3496     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3497     tcg_gen_addi_tl(u, t, addend);
3498 
3499     /* E.g. for fetch and increment bounded... */
3500     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3501     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3502     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3503 
3504     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3505     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3506     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3507 }
3508 
3509 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3510 {
3511     uint32_t gpr_FC = FC(ctx->opcode);
3512     TCGv EA = tcg_temp_new();
3513     int rt = rD(ctx->opcode);
3514     bool need_serial;
3515     TCGv src, dst;
3516 
3517     gen_addr_register(ctx, EA);
3518     dst = cpu_gpr[rt];
3519     src = cpu_gpr[(rt + 1) & 31];
3520 
3521     need_serial = false;
3522     memop |= MO_ALIGN;
3523     switch (gpr_FC) {
3524     case 0: /* Fetch and add */
3525         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3526         break;
3527     case 1: /* Fetch and xor */
3528         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3529         break;
3530     case 2: /* Fetch and or */
3531         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3532         break;
3533     case 3: /* Fetch and 'and' */
3534         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3535         break;
3536     case 4:  /* Fetch and max unsigned */
3537         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3538         break;
3539     case 5:  /* Fetch and max signed */
3540         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3541         break;
3542     case 6:  /* Fetch and min unsigned */
3543         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3544         break;
3545     case 7:  /* Fetch and min signed */
3546         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3547         break;
3548     case 8: /* Swap */
3549         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3550         break;
3551 
3552     case 16: /* Compare and swap not equal */
3553         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3554             need_serial = true;
3555         } else {
3556             TCGv t0 = tcg_temp_new();
3557             TCGv t1 = tcg_temp_new();
3558 
3559             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3560             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3561                 tcg_gen_mov_tl(t1, src);
3562             } else {
3563                 tcg_gen_ext32u_tl(t1, src);
3564             }
3565             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3566                                cpu_gpr[(rt + 2) & 31], t0);
3567             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3568             tcg_gen_mov_tl(dst, t0);
3569         }
3570         break;
3571 
3572     case 24: /* Fetch and increment bounded */
3573         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3574             need_serial = true;
3575         } else {
3576             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3577         }
3578         break;
3579     case 25: /* Fetch and increment equal */
3580         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3581             need_serial = true;
3582         } else {
3583             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3584         }
3585         break;
3586     case 28: /* Fetch and decrement bounded */
3587         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3588             need_serial = true;
3589         } else {
3590             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3591         }
3592         break;
3593 
3594     default:
3595         /* invoke data storage error handler */
3596         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3597     }
3598 
3599     if (need_serial) {
3600         /* Restart with exclusive lock.  */
3601         gen_helper_exit_atomic(cpu_env);
3602         ctx->base.is_jmp = DISAS_NORETURN;
3603     }
3604 }
3605 
3606 static void gen_lwat(DisasContext *ctx)
3607 {
3608     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3609 }
3610 
3611 #ifdef TARGET_PPC64
3612 static void gen_ldat(DisasContext *ctx)
3613 {
3614     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3615 }
3616 #endif
3617 
3618 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3619 {
3620     uint32_t gpr_FC = FC(ctx->opcode);
3621     TCGv EA = tcg_temp_new();
3622     TCGv src, discard;
3623 
3624     gen_addr_register(ctx, EA);
3625     src = cpu_gpr[rD(ctx->opcode)];
3626     discard = tcg_temp_new();
3627 
3628     memop |= MO_ALIGN;
3629     switch (gpr_FC) {
3630     case 0: /* add and Store */
3631         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3632         break;
3633     case 1: /* xor and Store */
3634         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3635         break;
3636     case 2: /* Or and Store */
3637         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3638         break;
3639     case 3: /* 'and' and Store */
3640         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3641         break;
3642     case 4:  /* Store max unsigned */
3643         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3644         break;
3645     case 5:  /* Store max signed */
3646         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3647         break;
3648     case 6:  /* Store min unsigned */
3649         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3650         break;
3651     case 7:  /* Store min signed */
3652         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3653         break;
3654     case 24: /* Store twin  */
3655         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3656             /* Restart with exclusive lock.  */
3657             gen_helper_exit_atomic(cpu_env);
3658             ctx->base.is_jmp = DISAS_NORETURN;
3659         } else {
3660             TCGv t = tcg_temp_new();
3661             TCGv t2 = tcg_temp_new();
3662             TCGv s = tcg_temp_new();
3663             TCGv s2 = tcg_temp_new();
3664             TCGv ea_plus_s = tcg_temp_new();
3665 
3666             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3667             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3668             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3669             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3670             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3671             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3672             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3673         }
3674         break;
3675     default:
3676         /* invoke data storage error handler */
3677         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3678     }
3679 }
3680 
3681 static void gen_stwat(DisasContext *ctx)
3682 {
3683     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3684 }
3685 
3686 #ifdef TARGET_PPC64
3687 static void gen_stdat(DisasContext *ctx)
3688 {
3689     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3690 }
3691 #endif
3692 
3693 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3694 {
3695     TCGLabel *l1 = gen_new_label();
3696     TCGLabel *l2 = gen_new_label();
3697     TCGv t0 = tcg_temp_new();
3698     int reg = rS(ctx->opcode);
3699 
3700     gen_set_access_type(ctx, ACCESS_RES);
3701     gen_addr_reg_index(ctx, t0);
3702     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3703 
3704     t0 = tcg_temp_new();
3705     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3706                               cpu_gpr[reg], ctx->mem_idx,
3707                               DEF_MEMOP(memop) | MO_ALIGN);
3708     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3709     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3710     tcg_gen_or_tl(t0, t0, cpu_so);
3711     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3712     tcg_gen_br(l2);
3713 
3714     gen_set_label(l1);
3715 
3716     /*
3717      * Address mismatch implies failure.  But we still need to provide
3718      * the memory barrier semantics of the instruction.
3719      */
3720     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3721     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3722 
3723     gen_set_label(l2);
3724     tcg_gen_movi_tl(cpu_reserve, -1);
3725 }
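/*
 * For reference, CR0 is set above to LT:GT:EQ:SO = 0:0:success:XER[SO],
 * i.e. EQ is 1 only when the cmpxchg read back cpu_reserve_val (the
 * reservation was still intact, so the store was performed), and SO is
 * copied from cpu_so.
 */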
3726 
3727 #define STCX(name, memop)                  \
3728 static void gen_##name(DisasContext *ctx)  \
3729 {                                          \
3730     gen_conditional_store(ctx, memop);     \
3731 }
3732 
3733 STCX(stbcx_, DEF_MEMOP(MO_UB))
3734 STCX(sthcx_, DEF_MEMOP(MO_UW))
3735 STCX(stwcx_, DEF_MEMOP(MO_UL))
3736 
3737 #if defined(TARGET_PPC64)
3738 /* ldarx */
3739 LARX(ldarx, DEF_MEMOP(MO_UQ))
3740 /* stdcx. */
3741 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3742 
3743 /* lqarx */
3744 static void gen_lqarx(DisasContext *ctx)
3745 {
3746     int rd = rD(ctx->opcode);
3747     TCGv EA, hi, lo;
3748     TCGv_i128 t16;
3749 
3750     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3751                  (rd == rB(ctx->opcode)))) {
3752         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3753         return;
3754     }
3755 
3756     gen_set_access_type(ctx, ACCESS_RES);
3757     EA = tcg_temp_new();
3758     gen_addr_reg_index(ctx, EA);
3759 
3760     /* Note that the low part is always in RD+1, even in LE mode.  */
3761     lo = cpu_gpr[rd + 1];
3762     hi = cpu_gpr[rd];
3763 
3764     t16 = tcg_temp_new_i128();
3765     tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3766     tcg_gen_extr_i128_i64(lo, hi, t16);
3767 
3768     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3769     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
3770 }
3771 
3772 /* stqcx. */
3773 static void gen_stqcx_(DisasContext *ctx)
3774 {
3775     TCGLabel *lab_fail, *lab_over;
3776     int rs = rS(ctx->opcode);
3777     TCGv EA, t0, t1;
3778     TCGv_i128 cmp, val;
3779 
3780     if (unlikely(rs & 1)) {
3781         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3782         return;
3783     }
3784 
3785     lab_fail = gen_new_label();
3786     lab_over = gen_new_label();
3787 
3788     gen_set_access_type(ctx, ACCESS_RES);
3789     EA = tcg_temp_new();
3790     gen_addr_reg_index(ctx, EA);
3791 
3792     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
3793 
3794     cmp = tcg_temp_new_i128();
3795     val = tcg_temp_new_i128();
3796 
3797     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3798 
3799     /* Note that the low part is always in RS+1, even in LE mode.  */
3800     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3801 
3802     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3803                                 DEF_MEMOP(MO_128 | MO_ALIGN));
3804 
3805     t0 = tcg_temp_new();
3806     t1 = tcg_temp_new();
3807     tcg_gen_extr_i128_i64(t1, t0, val);
3808 
3809     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3810     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3811     tcg_gen_or_tl(t0, t0, t1);
3812 
3813     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3814     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3815     tcg_gen_or_tl(t0, t0, cpu_so);
3816     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3817 
3818     tcg_gen_br(lab_over);
3819     gen_set_label(lab_fail);
3820 
3821     /*
3822      * Address mismatch implies failure.  But we still need to provide
3823      * the memory barrier semantics of the instruction.
3824      */
3825     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3826     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3827 
3828     gen_set_label(lab_over);
3829     tcg_gen_movi_tl(cpu_reserve, -1);
3830 }
3831 #endif /* defined(TARGET_PPC64) */
3832 
3833 /* sync */
3834 static void gen_sync(DisasContext *ctx)
3835 {
3836     TCGBar bar = TCG_MO_ALL;
3837     uint32_t l = (ctx->opcode >> 21) & 3;
3838 
3839     if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
3840         bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
3841     }
3842 
3843     /*
3844      * We may need to check for a pending TLB flush.
3845      *
3846      * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
3847      *
3848      * Additionally, this can only happen in kernel mode, so
3849      * check MSR_PR as well.
3850      */
3851     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
3852         gen_check_tlb_flush(ctx, true);
3853     }
3854 
3855     tcg_gen_mb(bar | TCG_BAR_SC);
3856 }
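/*
 * For reference, (opcode >> 21) & 3 extracts the sync L field, so plain
 * "sync" (L = 0) and "ptesync" (L = 2) emit a full TCG_MO_ALL barrier,
 * while "lwsync" (L = 1) is relaxed to a load/load + load/store +
 * store/store barrier when the CPU implements PPC2_MEM_LWSYNC.
 */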
3857 
3858 /* wait */
3859 static void gen_wait(DisasContext *ctx)
3860 {
3861     uint32_t wc;
3862 
3863     if (ctx->insns_flags & PPC_WAIT) {
3864         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3865 
3866         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3867             /* v2.06 introduced the WC field. WC > 0 may be treated as a no-op. */
3868             wc = WC(ctx->opcode);
3869         } else {
3870             wc = 0;
3871         }
3872 
3873     } else if (ctx->insns_flags2 & PPC2_ISA300) {
3874         /* v3.0 defines a new 'wait' encoding. */
3875         wc = WC(ctx->opcode);
3876         if (ctx->insns_flags2 & PPC2_ISA310) {
3877             uint32_t pl = PL(ctx->opcode);
3878 
3879             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3880             if (wc == 3) {
3881                 gen_invalid(ctx);
3882                 return;
3883             }
3884 
3885             /* PL 1-3 are reserved. If WC=2 then the insn is treated as a no-op. */
3886             if (pl > 0 && wc != 2) {
3887                 gen_invalid(ctx);
3888                 return;
3889             }
3890 
3891         } else { /* ISA300 */
3892             /* WC 1-3 are reserved */
3893             if (wc > 0) {
3894                 gen_invalid(ctx);
3895                 return;
3896             }
3897         }
3898 
3899     } else {
3900         warn_report("wait instruction decoded with wrong ISA flags.");
3901         gen_invalid(ctx);
3902         return;
3903     }
3904 
3905     /*
3906      * wait without WC field or with WC=0 waits for an exception / interrupt
3907      * to occur.
3908      */
3909     if (wc == 0) {
3910         TCGv_i32 t0 = tcg_constant_i32(1);
3911         tcg_gen_st_i32(t0, cpu_env,
3912                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3913         /* Stop translation, as the CPU is supposed to sleep from now */
3914         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3915     }
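    /*
     * For reference, the store above writes cpu->halted: cpu_env points at
     * the CPUPPCState embedded in PowerPCCPU, so subtracting
     * offsetof(PowerPCCPU, env) walks back to the container object, and
     * offsetof(CPUState, halted) then selects the halted field of the
     * parent CPUState (assuming, as elsewhere in QEMU, that CPUState is the
     * first member of PowerPCCPU).
     */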
3916 
3917     /*
3918      * Other wait types must not just wait until an exception occurs because
3919      * ignoring their other wake-up conditions could cause a hang.
3920      *
3921      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3922      * no-ops.
3923      *
3924      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3925      *
3926      * wc=2 waits for an implementation-specific condition, which could
3927      * always be true, so it can be implemented as a no-op.
3928      *
3929      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3930      *
3931      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3932      * Reservation-loss may have implementation-specific conditions, so it
3933      * can be implemented as a no-op.
3934      *
3935      * wc=2 waits for an exception or an amount of time to pass. This
3936      * amount is implementation-specific so it can be implemented as a
3937      * no-op.
3938      *
3939      * ISA v3.1 allows for execution to resume "in the rare case of
3940      * an implementation-dependent event", so in any case software must
3941      * not depend on the architected resumption condition becoming
3942      * true; no-op implementations should therefore be architecturally
3943      * correct (if suboptimal).
3944      */
3945 }
3946 
3947 #if defined(TARGET_PPC64)
3948 static void gen_doze(DisasContext *ctx)
3949 {
3950 #if defined(CONFIG_USER_ONLY)
3951     GEN_PRIV(ctx);
3952 #else
3953     TCGv_i32 t;
3954 
3955     CHK_HV(ctx);
3956     t = tcg_constant_i32(PPC_PM_DOZE);
3957     gen_helper_pminsn(cpu_env, t);
3958     /* Stop translation, as the CPU is supposed to sleep from now */
3959     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3960 #endif /* defined(CONFIG_USER_ONLY) */
3961 }
3962 
3963 static void gen_nap(DisasContext *ctx)
3964 {
3965 #if defined(CONFIG_USER_ONLY)
3966     GEN_PRIV(ctx);
3967 #else
3968     TCGv_i32 t;
3969 
3970     CHK_HV(ctx);
3971     t = tcg_constant_i32(PPC_PM_NAP);
3972     gen_helper_pminsn(cpu_env, t);
3973     /* Stop translation, as the CPU is supposed to sleep from now */
3974     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3975 #endif /* defined(CONFIG_USER_ONLY) */
3976 }
3977 
3978 static void gen_stop(DisasContext *ctx)
3979 {
3980 #if defined(CONFIG_USER_ONLY)
3981     GEN_PRIV(ctx);
3982 #else
3983     TCGv_i32 t;
3984 
3985     CHK_HV(ctx);
3986     t = tcg_constant_i32(PPC_PM_STOP);
3987     gen_helper_pminsn(cpu_env, t);
3988     /* Stop translation, as the CPU is supposed to sleep from now */
3989     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3990 #endif /* defined(CONFIG_USER_ONLY) */
3991 }
3992 
3993 static void gen_sleep(DisasContext *ctx)
3994 {
3995 #if defined(CONFIG_USER_ONLY)
3996     GEN_PRIV(ctx);
3997 #else
3998     TCGv_i32 t;
3999 
4000     CHK_HV(ctx);
4001     t = tcg_constant_i32(PPC_PM_SLEEP);
4002     gen_helper_pminsn(cpu_env, t);
4003     /* Stop translation, as the CPU is supposed to sleep from now */
4004     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4005 #endif /* defined(CONFIG_USER_ONLY) */
4006 }
4007 
4008 static void gen_rvwinkle(DisasContext *ctx)
4009 {
4010 #if defined(CONFIG_USER_ONLY)
4011     GEN_PRIV(ctx);
4012 #else
4013     TCGv_i32 t;
4014 
4015     CHK_HV(ctx);
4016     t = tcg_constant_i32(PPC_PM_RVWINKLE);
4017     gen_helper_pminsn(cpu_env, t);
4018     /* Stop translation, as the CPU is supposed to sleep from now */
4019     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4020 #endif /* defined(CONFIG_USER_ONLY) */
4021 }
4022 #endif /* #if defined(TARGET_PPC64) */
4023 
4024 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4025 {
4026 #if defined(TARGET_PPC64)
4027     if (ctx->has_cfar) {
4028         tcg_gen_movi_tl(cpu_cfar, nip);
4029     }
4030 #endif
4031 }
4032 
4033 #if defined(TARGET_PPC64)
4034 static void pmu_count_insns(DisasContext *ctx)
4035 {
4036     /*
4037      * Do not bother calling the helper if the PMU isn't counting
4038      * instructions.
4039      */
4040     if (!ctx->pmu_insn_cnt) {
4041         return;
4042     }
4043 
4044  #if !defined(CONFIG_USER_ONLY)
4045     TCGLabel *l;
4046     TCGv t0;
4047 
4048     /*
4049      * The PMU insns_inc() helper stops the internal PMU timer if a
4050      * counter overflow happens. In that case, if the guest is
4051      * running with icount and we do not handle it beforehand,
4052      * the helper can trigger a 'bad icount read'.
4053      */
4054     translator_io_start(&ctx->base);
4055 
4056     /* Avoid helper calls when only PMC5-6 are enabled. */
4057     if (!ctx->pmc_other) {
4058         l = gen_new_label();
4059         t0 = tcg_temp_new();
4060 
4061         gen_load_spr(t0, SPR_POWER_PMC5);
4062         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4063         gen_store_spr(SPR_POWER_PMC5, t0);
4064         /* Check for overflow, if it's enabled */
4065         if (ctx->mmcr0_pmcjce) {
4066             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
4067             gen_helper_handle_pmc5_overflow(cpu_env);
4068         }
4069 
4070         gen_set_label(l);
4071     } else {
4072         gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4073     }
4074   #else
4075     /*
4076      * User mode can read (but not write) PMC5 and start/stop
4077      * the PMU via MMCR0_FC. In this case just increment
4078      * PMC5 with base.num_insns.
4079      */
4080     TCGv t0 = tcg_temp_new();
4081 
4082     gen_load_spr(t0, SPR_POWER_PMC5);
4083     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4084     gen_store_spr(SPR_POWER_PMC5, t0);
4085   #endif /* #if !defined(CONFIG_USER_ONLY) */
4086 }
4087 #else
4088 static void pmu_count_insns(DisasContext *ctx)
4089 {
4090     return;
4091 }
4092 #endif /* #if defined(TARGET_PPC64) */
4093 
4094 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4095 {
4096     return translator_use_goto_tb(&ctx->base, dest);
4097 }
4098 
4099 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4100 {
4101     if (unlikely(ctx->singlestep_enabled)) {
4102         gen_debug_exception(ctx);
4103     } else {
4104         /*
4105          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4106          * CF_NO_GOTO_PTR is set. Count insns now.
4107          */
4108         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4109             pmu_count_insns(ctx);
4110         }
4111 
4112         tcg_gen_lookup_and_goto_ptr();
4113     }
4114 }
4115 
4116 /***                                Branch                                 ***/
4117 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4118 {
4119     if (NARROW_MODE(ctx)) {
4120         dest = (uint32_t) dest;
4121     }
4122     if (use_goto_tb(ctx, dest)) {
4123         pmu_count_insns(ctx);
4124         tcg_gen_goto_tb(n);
4125         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4126         tcg_gen_exit_tb(ctx->base.tb, n);
4127     } else {
4128         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4129         gen_lookup_and_goto_ptr(ctx);
4130     }
4131 }
4132 
4133 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4134 {
4135     if (NARROW_MODE(ctx)) {
4136         nip = (uint32_t)nip;
4137     }
4138     tcg_gen_movi_tl(cpu_lr, nip);
4139 }
4140 
4141 /* b ba bl bla */
4142 static void gen_b(DisasContext *ctx)
4143 {
4144     target_ulong li, target;
4145 
4146     /* sign extend LI */
4147     li = LI(ctx->opcode);
4148     li = (li ^ 0x02000000) - 0x02000000;
4149     if (likely(AA(ctx->opcode) == 0)) {
4150         target = ctx->cia + li;
4151     } else {
4152         target = li;
4153     }
4154     if (LK(ctx->opcode)) {
4155         gen_setlr(ctx, ctx->base.pc_next);
4156     }
4157     gen_update_cfar(ctx, ctx->cia);
4158     gen_goto_tb(ctx, 0, target);
4159     ctx->base.is_jmp = DISAS_NORETURN;
4160 }
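/*
 * For reference, LI() above yields the 26-bit byte displacement (the 24-bit
 * LI field with the two low-order zero bits), and the xor/subtract pair
 * sign-extends it around bit 25.  E.g. li = 0x3fffffc gives
 * (0x3fffffc ^ 0x02000000) - 0x02000000 = -4.
 */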
4161 
4162 #define BCOND_IM  0
4163 #define BCOND_LR  1
4164 #define BCOND_CTR 2
4165 #define BCOND_TAR 3
4166 
4167 static void gen_bcond(DisasContext *ctx, int type)
4168 {
4169     uint32_t bo = BO(ctx->opcode);
4170     TCGLabel *l1;
4171     TCGv target;
4172 
4173     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4174         target = tcg_temp_new();
4175         if (type == BCOND_CTR) {
4176             tcg_gen_mov_tl(target, cpu_ctr);
4177         } else if (type == BCOND_TAR) {
4178             gen_load_spr(target, SPR_TAR);
4179         } else {
4180             tcg_gen_mov_tl(target, cpu_lr);
4181         }
4182     } else {
4183         target = NULL;
4184     }
4185     if (LK(ctx->opcode)) {
4186         gen_setlr(ctx, ctx->base.pc_next);
4187     }
4188     l1 = gen_new_label();
4189     if ((bo & 0x4) == 0) {
4190         /* Decrement and test CTR */
4191         TCGv temp = tcg_temp_new();
4192 
4193         if (type == BCOND_CTR) {
4194             /*
4195              * All ISAs up to v3 describe this form of bcctr as invalid but
4196              * some processors, ie. 64-bit server processors compliant with
4197              * arch 2.x, do implement a "test and decrement" logic instead,
4198              * as described in their respective UMs. This logic involves CTR
4199              * to act as both the branch target and a counter, which makes
4200              * it basically useless and thus never used in real code.
4201              *
4202              * This form was hence chosen to trigger extra micro-architectural
4203              * side-effect on real HW needed for the Spectre v2 workaround.
4204              * It is up to guests that implement such workaround, ie. linux, to
4205              * use this form in a way it just triggers the side-effect without
4206              * doing anything else harmful.
4207              */
4208             if (unlikely(!is_book3s_arch2x(ctx))) {
4209                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4210                 return;
4211             }
4212 
4213             if (NARROW_MODE(ctx)) {
4214                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4215             } else {
4216                 tcg_gen_mov_tl(temp, cpu_ctr);
4217             }
4218             if (bo & 0x2) {
4219                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4220             } else {
4221                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4222             }
4223             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4224         } else {
4225             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4226             if (NARROW_MODE(ctx)) {
4227                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4228             } else {
4229                 tcg_gen_mov_tl(temp, cpu_ctr);
4230             }
4231             if (bo & 0x2) {
4232                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4233             } else {
4234                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4235             }
4236         }
4237     }
4238     if ((bo & 0x10) == 0) {
4239         /* Test CR */
4240         uint32_t bi = BI(ctx->opcode);
4241         uint32_t mask = 0x08 >> (bi & 0x03);
4242         TCGv_i32 temp = tcg_temp_new_i32();
4243 
4244         if (bo & 0x8) {
4245             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4246             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4247         } else {
4248             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4249             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4250         }
4251     }
4252     gen_update_cfar(ctx, ctx->cia);
4253     if (type == BCOND_IM) {
4254         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4255         if (likely(AA(ctx->opcode) == 0)) {
4256             gen_goto_tb(ctx, 0, ctx->cia + li);
4257         } else {
4258             gen_goto_tb(ctx, 0, li);
4259         }
4260     } else {
4261         if (NARROW_MODE(ctx)) {
4262             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4263         } else {
4264             tcg_gen_andi_tl(cpu_nip, target, ~3);
4265         }
4266         gen_lookup_and_goto_ptr(ctx);
4267     }
4268     if ((bo & 0x14) != 0x14) {
4269         /* fallthrough case */
4270         gen_set_label(l1);
4271         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4272     }
4273     ctx->base.is_jmp = DISAS_NORETURN;
4274 }
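/*
 * Summary of the BO bits tested above, using the mask values from the code
 * (illustrative):
 *   bo & 0x10 - if clear, the CR bit selected by BI is tested
 *   bo & 0x08 - branch when the CR bit is set (else when it is clear)
 *   bo & 0x04 - if clear, CTR is decremented and tested
 *   bo & 0x02 - branch when CTR == 0 (else when CTR != 0)
 * (bo & 0x14) == 0x14 therefore means "branch always", which is why no
 * fallthrough path is generated in that case.
 */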
4275 
4276 static void gen_bc(DisasContext *ctx)
4277 {
4278     gen_bcond(ctx, BCOND_IM);
4279 }
4280 
4281 static void gen_bcctr(DisasContext *ctx)
4282 {
4283     gen_bcond(ctx, BCOND_CTR);
4284 }
4285 
4286 static void gen_bclr(DisasContext *ctx)
4287 {
4288     gen_bcond(ctx, BCOND_LR);
4289 }
4290 
4291 static void gen_bctar(DisasContext *ctx)
4292 {
4293     gen_bcond(ctx, BCOND_TAR);
4294 }
4295 
4296 /***                      Condition register logical                       ***/
4297 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4298 static void glue(gen_, name)(DisasContext *ctx)                               \
4299 {                                                                             \
4300     uint8_t bitmask;                                                          \
4301     int sh;                                                                   \
4302     TCGv_i32 t0, t1;                                                          \
4303     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4304     t0 = tcg_temp_new_i32();                                                  \
4305     if (sh > 0)                                                               \
4306         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4307     else if (sh < 0)                                                          \
4308         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4309     else                                                                      \
4310         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4311     t1 = tcg_temp_new_i32();                                                  \
4312     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4313     if (sh > 0)                                                               \
4314         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4315     else if (sh < 0)                                                          \
4316         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4317     else                                                                      \
4318         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4319     tcg_op(t0, t0, t1);                                                       \
4320     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4321     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4322     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4323     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4324 }
4325 
4326 /* crand */
4327 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4328 /* crandc */
4329 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4330 /* creqv */
4331 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4332 /* crnand */
4333 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4334 /* crnor */
4335 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4336 /* cror */
4337 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4338 /* crorc */
4339 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4340 /* crxor */
4341 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
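/*
 * For illustration, each CR logical instruction operates on single CR bits
 * while the state is kept as eight 4-bit cpu_crf fields.  The macro above
 * therefore shifts the source fields so the crbA/crbB bits line up with the
 * crbD bit position, applies tcg_op, masks the single result bit and merges
 * it back into cpu_crf[crbD >> 2].  For example, "crand 0, 4, 8" ANDs
 * CR1[LT] with CR2[LT] and writes the result to CR0[LT].
 */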
4342 
4343 /* mcrf */
4344 static void gen_mcrf(DisasContext *ctx)
4345 {
4346     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4347 }
4348 
4349 /***                           System linkage                              ***/
4350 
4351 /* rfi (supervisor only) */
4352 static void gen_rfi(DisasContext *ctx)
4353 {
4354 #if defined(CONFIG_USER_ONLY)
4355     GEN_PRIV(ctx);
4356 #else
4357     /*
4358      * This instruction doesn't exist anymore on 64-bit server
4359      * processors compliant with arch 2.x
4360      */
4361     if (is_book3s_arch2x(ctx)) {
4362         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4363         return;
4364     }
4365     /* Restore CPU state */
4366     CHK_SV(ctx);
4367     translator_io_start(&ctx->base);
4368     gen_update_cfar(ctx, ctx->cia);
4369     gen_helper_rfi(cpu_env);
4370     ctx->base.is_jmp = DISAS_EXIT;
4371 #endif
4372 }
4373 
4374 #if defined(TARGET_PPC64)
4375 static void gen_rfid(DisasContext *ctx)
4376 {
4377 #if defined(CONFIG_USER_ONLY)
4378     GEN_PRIV(ctx);
4379 #else
4380     /* Restore CPU state */
4381     CHK_SV(ctx);
4382     translator_io_start(&ctx->base);
4383     gen_update_cfar(ctx, ctx->cia);
4384     gen_helper_rfid(cpu_env);
4385     ctx->base.is_jmp = DISAS_EXIT;
4386 #endif
4387 }
4388 
4389 #if !defined(CONFIG_USER_ONLY)
4390 static void gen_rfscv(DisasContext *ctx)
4391 {
4392 #if defined(CONFIG_USER_ONLY)
4393     GEN_PRIV(ctx);
4394 #else
4395     /* Restore CPU state */
4396     CHK_SV(ctx);
4397     translator_io_start(&ctx->base);
4398     gen_update_cfar(ctx, ctx->cia);
4399     gen_helper_rfscv(cpu_env);
4400     ctx->base.is_jmp = DISAS_EXIT;
4401 #endif
4402 }
4403 #endif
4404 
4405 static void gen_hrfid(DisasContext *ctx)
4406 {
4407 #if defined(CONFIG_USER_ONLY)
4408     GEN_PRIV(ctx);
4409 #else
4410     /* Restore CPU state */
4411     CHK_HV(ctx);
4412     gen_helper_hrfid(cpu_env);
4413     ctx->base.is_jmp = DISAS_EXIT;
4414 #endif
4415 }
4416 #endif
4417 
4418 /* sc */
4419 #if defined(CONFIG_USER_ONLY)
4420 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4421 #else
4422 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4423 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4424 #endif
4425 static void gen_sc(DisasContext *ctx)
4426 {
4427     uint32_t lev;
4428 
4429     lev = (ctx->opcode >> 5) & 0x7F;
4430     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4431 }
4432 
4433 #if defined(TARGET_PPC64)
4434 #if !defined(CONFIG_USER_ONLY)
4435 static void gen_scv(DisasContext *ctx)
4436 {
4437     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4438 
4439     /* Set the PC back to the faulting instruction. */
4440     gen_update_nip(ctx, ctx->cia);
4441     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4442 
4443     ctx->base.is_jmp = DISAS_NORETURN;
4444 }
4445 #endif
4446 #endif
4447 
4448 /***                                Trap                                   ***/
4449 
4450 /* Check for unconditional traps (always or never) */
4451 static bool check_unconditional_trap(DisasContext *ctx)
4452 {
4453     /* Trap never */
4454     if (TO(ctx->opcode) == 0) {
4455         return true;
4456     }
4457     /* Trap always */
4458     if (TO(ctx->opcode) == 31) {
4459         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4460         return true;
4461     }
4462     return false;
4463 }
4464 
4465 /* tw */
4466 static void gen_tw(DisasContext *ctx)
4467 {
4468     TCGv_i32 t0;
4469 
4470     if (check_unconditional_trap(ctx)) {
4471         return;
4472     }
4473     t0 = tcg_constant_i32(TO(ctx->opcode));
4474     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4475                   t0);
4476 }
4477 
4478 /* twi */
4479 static void gen_twi(DisasContext *ctx)
4480 {
4481     TCGv t0;
4482     TCGv_i32 t1;
4483 
4484     if (check_unconditional_trap(ctx)) {
4485         return;
4486     }
4487     t0 = tcg_constant_tl(SIMM(ctx->opcode));
4488     t1 = tcg_constant_i32(TO(ctx->opcode));
4489     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4490 }
4491 
4492 #if defined(TARGET_PPC64)
4493 /* td */
4494 static void gen_td(DisasContext *ctx)
4495 {
4496     TCGv_i32 t0;
4497 
4498     if (check_unconditional_trap(ctx)) {
4499         return;
4500     }
4501     t0 = tcg_constant_i32(TO(ctx->opcode));
4502     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4503                   t0);
4504 }
4505 
4506 /* tdi */
4507 static void gen_tdi(DisasContext *ctx)
4508 {
4509     TCGv t0;
4510     TCGv_i32 t1;
4511 
4512     if (check_unconditional_trap(ctx)) {
4513         return;
4514     }
4515     t0 = tcg_constant_tl(SIMM(ctx->opcode));
4516     t1 = tcg_constant_i32(TO(ctx->opcode));
4517     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4518 }
4519 #endif
4520 
4521 /***                          Processor control                            ***/
4522 
4523 /* mcrxr */
4524 static void gen_mcrxr(DisasContext *ctx)
4525 {
4526     TCGv_i32 t0 = tcg_temp_new_i32();
4527     TCGv_i32 t1 = tcg_temp_new_i32();
4528     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4529 
4530     tcg_gen_trunc_tl_i32(t0, cpu_so);
4531     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4532     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4533     tcg_gen_shli_i32(t0, t0, 3);
4534     tcg_gen_shli_i32(t1, t1, 2);
4535     tcg_gen_shli_i32(dst, dst, 1);
4536     tcg_gen_or_i32(dst, dst, t0);
4537     tcg_gen_or_i32(dst, dst, t1);
4538 
4539     tcg_gen_movi_tl(cpu_so, 0);
4540     tcg_gen_movi_tl(cpu_ov, 0);
4541     tcg_gen_movi_tl(cpu_ca, 0);
4542 }
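/*
 * For reference, after the shifts and ORs above crfD holds SO:OV:CA:0
 * (bits 3..0), and the three XER bits are then cleared, as mcrxr requires.
 */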
4543 
4544 #ifdef TARGET_PPC64
4545 /* mcrxrx */
4546 static void gen_mcrxrx(DisasContext *ctx)
4547 {
4548     TCGv t0 = tcg_temp_new();
4549     TCGv t1 = tcg_temp_new();
4550     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4551 
4552     /* copy OV and OV32 */
4553     tcg_gen_shli_tl(t0, cpu_ov, 1);
4554     tcg_gen_or_tl(t0, t0, cpu_ov32);
4555     tcg_gen_shli_tl(t0, t0, 2);
4556     /* copy CA and CA32 */
4557     tcg_gen_shli_tl(t1, cpu_ca, 1);
4558     tcg_gen_or_tl(t1, t1, cpu_ca32);
4559     tcg_gen_or_tl(t0, t0, t1);
4560     tcg_gen_trunc_tl_i32(dst, t0);
4561 }
4562 #endif
4563 
4564 /* mfcr mfocrf */
4565 static void gen_mfcr(DisasContext *ctx)
4566 {
4567     uint32_t crm, crn;
4568 
4569     if (likely(ctx->opcode & 0x00100000)) {
4570         crm = CRM(ctx->opcode);
4571         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4572             crn = ctz32(crm);
4573             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4574             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4575                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4576         }
4577     } else {
4578         TCGv_i32 t0 = tcg_temp_new_i32();
4579         tcg_gen_mov_i32(t0, cpu_crf[0]);
4580         tcg_gen_shli_i32(t0, t0, 4);
4581         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4582         tcg_gen_shli_i32(t0, t0, 4);
4583         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4584         tcg_gen_shli_i32(t0, t0, 4);
4585         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4586         tcg_gen_shli_i32(t0, t0, 4);
4587         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4588         tcg_gen_shli_i32(t0, t0, 4);
4589         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4590         tcg_gen_shli_i32(t0, t0, 4);
4591         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4592         tcg_gen_shli_i32(t0, t0, 4);
4593         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4594         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4595     }
4596 }
4597 
4598 /* mfmsr */
4599 static void gen_mfmsr(DisasContext *ctx)
4600 {
4601     CHK_SV(ctx);
4602     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4603 }
4604 
4605 /* mfspr */
4606 static inline void gen_op_mfspr(DisasContext *ctx)
4607 {
4608     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4609     uint32_t sprn = SPR(ctx->opcode);
4610 
4611 #if defined(CONFIG_USER_ONLY)
4612     read_cb = ctx->spr_cb[sprn].uea_read;
4613 #else
4614     if (ctx->pr) {
4615         read_cb = ctx->spr_cb[sprn].uea_read;
4616     } else if (ctx->hv) {
4617         read_cb = ctx->spr_cb[sprn].hea_read;
4618     } else {
4619         read_cb = ctx->spr_cb[sprn].oea_read;
4620     }
4621 #endif
4622     if (likely(read_cb != NULL)) {
4623         if (likely(read_cb != SPR_NOACCESS)) {
4624             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4625         } else {
4626             /* Privilege exception */
4627             /*
4628              * This is a hack to avoid warnings when running Linux:
4629              * this OS breaks the PowerPC virtualisation model,
4630              * allowing userland application to read the PVR
4631              */
4632             if (sprn != SPR_PVR) {
4633                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4634                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4635                               ctx->cia);
4636             }
4637             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4638         }
4639     } else {
4640         /* ISA 2.07 defines these as no-ops */
4641         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4642             (sprn >= 808 && sprn <= 811)) {
4643             /* This is a nop */
4644             return;
4645         }
4646         /* Not defined */
4647         qemu_log_mask(LOG_GUEST_ERROR,
4648                       "Trying to read invalid spr %d (0x%03x) at "
4649                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4650 
4651         /*
4652          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
4653          * generate a priv, a hv emu or a no-op
4654          */
4655         if (sprn & 0x10) {
4656             if (ctx->pr) {
4657                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4658             }
4659         } else {
4660             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4661                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4662             }
4663         }
4664     }
4665 }
4666 
4667 static void gen_mfspr(DisasContext *ctx)
4668 {
4669     gen_op_mfspr(ctx);
4670 }
4671 
4672 /* mftb */
4673 static void gen_mftb(DisasContext *ctx)
4674 {
4675     gen_op_mfspr(ctx);
4676 }
4677 
4678 /* mtcrf mtocrf */
4679 static void gen_mtcrf(DisasContext *ctx)
4680 {
4681     uint32_t crm, crn;
4682 
4683     crm = CRM(ctx->opcode);
4684     if (likely((ctx->opcode & 0x00100000))) {
4685         if (crm && ((crm & (crm - 1)) == 0)) {
4686             TCGv_i32 temp = tcg_temp_new_i32();
4687             crn = ctz32(crm);
4688             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4689             tcg_gen_shri_i32(temp, temp, crn * 4);
4690             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4691         }
4692     } else {
4693         TCGv_i32 temp = tcg_temp_new_i32();
4694         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4695         for (crn = 0 ; crn < 8 ; crn++) {
4696             if (crm & (1 << crn)) {
4697                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4698                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4699             }
4700         }
4701     }
4702 }
4703 
4704 /* mtmsr */
4705 #if defined(TARGET_PPC64)
4706 static void gen_mtmsrd(DisasContext *ctx)
4707 {
4708     if (unlikely(!is_book3s_arch2x(ctx))) {
4709         gen_invalid(ctx);
4710         return;
4711     }
4712 
4713     CHK_SV(ctx);
4714 
4715 #if !defined(CONFIG_USER_ONLY)
4716     TCGv t0, t1;
4717     target_ulong mask;
4718 
4719     t0 = tcg_temp_new();
4720     t1 = tcg_temp_new();
4721 
4722     translator_io_start(&ctx->base);
4723 
4724     if (ctx->opcode & 0x00010000) {
4725         /* L=1 form only updates EE and RI */
4726         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4727     } else {
4728         /* mtmsrd does not alter HV, S, ME, or LE */
4729         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4730                  (1ULL << MSR_HV));
4731         /*
4732          * XXX: we need to update nip before the store if we enter
4733          *      power saving mode, as we will exit the loop directly from
4734          *      ppc_store_msr
4735          */
4736         gen_update_nip(ctx, ctx->base.pc_next);
4737     }
4738 
4739     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4740     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4741     tcg_gen_or_tl(t0, t0, t1);
4742 
4743     gen_helper_store_msr(cpu_env, t0);
4744 
4745     /* Must stop the translation as machine state (may have) changed */
4746     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4747 #endif /* !defined(CONFIG_USER_ONLY) */
4748 }
4749 #endif /* defined(TARGET_PPC64) */
4750 
4751 static void gen_mtmsr(DisasContext *ctx)
4752 {
4753     CHK_SV(ctx);
4754 
4755 #if !defined(CONFIG_USER_ONLY)
4756     TCGv t0, t1;
4757     target_ulong mask = 0xFFFFFFFF;
4758 
4759     t0 = tcg_temp_new();
4760     t1 = tcg_temp_new();
4761 
4762     translator_io_start(&ctx->base);
4763     if (ctx->opcode & 0x00010000) {
4764         /* L=1 form only updates EE and RI */
4765         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4766     } else {
4767         /* mtmsr does not alter S, ME, or LE */
4768         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4769 
4770         /*
4771          * XXX: we need to update nip before the store because, if
4772          *      we enter power saving mode, we will exit the loop
4773          *      directly from ppc_store_msr
4774          */
4775         gen_update_nip(ctx, ctx->base.pc_next);
4776     }
4777 
4778     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4779     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4780     tcg_gen_or_tl(t0, t0, t1);
4781 
4782     gen_helper_store_msr(cpu_env, t0);
4783 
4784     /* Must stop the translation as machine state (may have) changed */
4785     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4786 #endif
4787 }
4788 
4789 /* mtspr */
4790 static void gen_mtspr(DisasContext *ctx)
4791 {
4792     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4793     uint32_t sprn = SPR(ctx->opcode);
4794 
4795 #if defined(CONFIG_USER_ONLY)
4796     write_cb = ctx->spr_cb[sprn].uea_write;
4797 #else
4798     if (ctx->pr) {
4799         write_cb = ctx->spr_cb[sprn].uea_write;
4800     } else if (ctx->hv) {
4801         write_cb = ctx->spr_cb[sprn].hea_write;
4802     } else {
4803         write_cb = ctx->spr_cb[sprn].oea_write;
4804     }
4805 #endif
4806     if (likely(write_cb != NULL)) {
4807         if (likely(write_cb != SPR_NOACCESS)) {
4808             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4809         } else {
4810             /* Privilege exception */
4811             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4812                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4813                           ctx->cia);
4814             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4815         }
4816     } else {
4817         /* ISA 2.07 defines these as no-ops */
4818         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4819             (sprn >= 808 && sprn <= 811)) {
4820             /* This is a nop */
4821             return;
4822         }
4823 
4824         /* Not defined */
4825         qemu_log_mask(LOG_GUEST_ERROR,
4826                       "Trying to write invalid spr %d (0x%03x) at "
4827                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4828 
4830         /*
4831          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
4832          * generate a priv, a hv emu or a no-op
4833          */
4834         if (sprn & 0x10) {
4835             if (ctx->pr) {
4836                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4837             }
4838         } else {
4839             if (ctx->pr || sprn == 0) {
4840                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4841             }
4842         }
4843     }
4844 }
4845 
4846 #if defined(TARGET_PPC64)
4847 /* setb */
4848 static void gen_setb(DisasContext *ctx)
4849 {
4850     TCGv_i32 t0 = tcg_temp_new_i32();
4851     TCGv_i32 t8 = tcg_constant_i32(8);
4852     TCGv_i32 tm1 = tcg_constant_i32(-1);
4853     int crf = crfS(ctx->opcode);
4854 
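    /*
     * The CR field nibble has LT as bit 3 and GT as bit 2, so
     * t0 = (CR[crf] >= 4) gives 1 when GT or LT is set; the movcond
     * below overrides it with -1 when LT is set (>= 8), leaving 0
     * only when neither bit is set.
     */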
4855     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4856     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4857     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4858 }
4859 #endif
4860 
4861 /***                         Cache management                              ***/
4862 
4863 /* dcbf */
4864 static void gen_dcbf(DisasContext *ctx)
4865 {
4866     /* XXX: specification says this is treated as a load by the MMU */
4867     TCGv t0;
4868     gen_set_access_type(ctx, ACCESS_CACHE);
4869     t0 = tcg_temp_new();
4870     gen_addr_reg_index(ctx, t0);
4871     gen_qemu_ld8u(ctx, t0, t0);
4872 }
4873 
4874 /* dcbfep (external PID dcbf) */
4875 static void gen_dcbfep(DisasContext *ctx)
4876 {
4877     /* XXX: specification says this is treated as a load by the MMU */
4878     TCGv t0;
4879     CHK_SV(ctx);
4880     gen_set_access_type(ctx, ACCESS_CACHE);
4881     t0 = tcg_temp_new();
4882     gen_addr_reg_index(ctx, t0);
4883     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4884 }
4885 
4886 /* dcbi (Supervisor only) */
4887 static void gen_dcbi(DisasContext *ctx)
4888 {
4889 #if defined(CONFIG_USER_ONLY)
4890     GEN_PRIV(ctx);
4891 #else
4892     TCGv EA, val;
4893 
4894     CHK_SV(ctx);
4895     EA = tcg_temp_new();
4896     gen_set_access_type(ctx, ACCESS_CACHE);
4897     gen_addr_reg_index(ctx, EA);
4898     val = tcg_temp_new();
4899     /* XXX: specification says this should be treated as a store by the MMU */
4900     gen_qemu_ld8u(ctx, val, EA);
4901     gen_qemu_st8(ctx, val, EA);
4902 #endif /* defined(CONFIG_USER_ONLY) */
4903 }
4904 
4905 /* dcbst */
4906 static void gen_dcbst(DisasContext *ctx)
4907 {
4908     /* XXX: specification says this is treated as a load by the MMU */
4909     TCGv t0;
4910     gen_set_access_type(ctx, ACCESS_CACHE);
4911     t0 = tcg_temp_new();
4912     gen_addr_reg_index(ctx, t0);
4913     gen_qemu_ld8u(ctx, t0, t0);
4914 }
4915 
4916 /* dcbstep (dcbstep External PID version) */
4917 static void gen_dcbstep(DisasContext *ctx)
4918 {
4919     /* XXX: specification says this is treated as a load by the MMU */
4920     TCGv t0;
4921     gen_set_access_type(ctx, ACCESS_CACHE);
4922     t0 = tcg_temp_new();
4923     gen_addr_reg_index(ctx, t0);
4924     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4925 }
4926 
4927 /* dcbt */
4928 static void gen_dcbt(DisasContext *ctx)
4929 {
4930     /*
4931      * interpreted as no-op
4932      * XXX: specification says this is treated as a load by the MMU but
4933      *      does not generate any exception
4934      */
4935 }
4936 
4937 /* dcbtep */
4938 static void gen_dcbtep(DisasContext *ctx)
4939 {
4940     /*
4941      * interpreted as no-op
4942      * XXX: specification says this is treated as a load by the MMU but
4943      *      does not generate any exception
4944      */
4945 }
4946 
4947 /* dcbtst */
4948 static void gen_dcbtst(DisasContext *ctx)
4949 {
4950     /*
4951      * interpreted as no-op
4952      * XXX: specification says this is treated as a load by the MMU but
4953      *      does not generate any exception
4954      */
4955 }
4956 
4957 /* dcbtstep */
4958 static void gen_dcbtstep(DisasContext *ctx)
4959 {
4960     /*
4961      * interpreted as no-op
4962      * XXX: specification says this is treated as a load by the MMU but
4963      *      does not generate any exception
4964      */
4965 }
4966 
4967 /* dcbtls */
4968 static void gen_dcbtls(DisasContext *ctx)
4969 {
4970     /* Always fails locking the cache */
4971     TCGv t0 = tcg_temp_new();
4972     gen_load_spr(t0, SPR_Exxx_L1CSR0);
4973     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4974     gen_store_spr(SPR_Exxx_L1CSR0, t0);
4975 }
4976 
4977 /* dcblc */
4978 static void gen_dcblc(DisasContext *ctx)
4979 {
4980     /*
4981      * interpreted as no-op
4982      */
4983 }
4984 
4985 /* dcbz */
4986 static void gen_dcbz(DisasContext *ctx)
4987 {
4988     TCGv tcgv_addr;
4989     TCGv_i32 tcgv_op;
4990 
4991     gen_set_access_type(ctx, ACCESS_CACHE);
4992     tcgv_addr = tcg_temp_new();
4993     tcgv_op = tcg_constant_i32(ctx->opcode & 0x03FF000);
4994     gen_addr_reg_index(ctx, tcgv_addr);
4995     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
4996 }
4997 
4998 /* dcbzep */
4999 static void gen_dcbzep(DisasContext *ctx)
5000 {
5001     TCGv tcgv_addr;
5002     TCGv_i32 tcgv_op;
5003 
5004     gen_set_access_type(ctx, ACCESS_CACHE);
5005     tcgv_addr = tcg_temp_new();
5006     tcgv_op = tcg_constant_i32(ctx->opcode & 0x03FF000);
5007     gen_addr_reg_index(ctx, tcgv_addr);
5008     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5009 }
5010 
5011 /* dst / dstt */
5012 static void gen_dst(DisasContext *ctx)
5013 {
5014     if (rA(ctx->opcode) == 0) {
5015         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5016     } else {
5017         /* interpreted as no-op */
5018     }
5019 }
5020 
5021 /* dstst / dststt */
5022 static void gen_dstst(DisasContext *ctx)
5023 {
5024     if (rA(ctx->opcode) == 0) {
5025         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5026     } else {
5027         /* interpreted as no-op */
5028     }
5030 }
5031 
5032 /* dss / dssall */
5033 static void gen_dss(DisasContext *ctx)
5034 {
5035     /* interpreted as no-op */
5036 }
5037 
5038 /* icbi */
5039 static void gen_icbi(DisasContext *ctx)
5040 {
5041     TCGv t0;
5042     gen_set_access_type(ctx, ACCESS_CACHE);
5043     t0 = tcg_temp_new();
5044     gen_addr_reg_index(ctx, t0);
5045     gen_helper_icbi(cpu_env, t0);
5046 }
5047 
5048 /* icbiep */
5049 static void gen_icbiep(DisasContext *ctx)
5050 {
5051     TCGv t0;
5052     gen_set_access_type(ctx, ACCESS_CACHE);
5053     t0 = tcg_temp_new();
5054     gen_addr_reg_index(ctx, t0);
5055     gen_helper_icbiep(cpu_env, t0);
5056 }
5057 
5058 /* Optional: */
5059 /* dcba */
5060 static void gen_dcba(DisasContext *ctx)
5061 {
5062     /*
5063      * interpreted as no-op
5064      * XXX: specification says this is treated as a store by the MMU
5065      *      but does not generate any exception
5066      */
5067 }
5068 
5069 /***                    Segment register manipulation                      ***/
5070 /* Supervisor only: */
5071 
5072 /* mfsr */
5073 static void gen_mfsr(DisasContext *ctx)
5074 {
5075 #if defined(CONFIG_USER_ONLY)
5076     GEN_PRIV(ctx);
5077 #else
5078     TCGv t0;
5079 
5080     CHK_SV(ctx);
5081     t0 = tcg_constant_tl(SR(ctx->opcode));
5082     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5083 #endif /* defined(CONFIG_USER_ONLY) */
5084 }
5085 
5086 /* mfsrin */
5087 static void gen_mfsrin(DisasContext *ctx)
5088 {
5089 #if defined(CONFIG_USER_ONLY)
5090     GEN_PRIV(ctx);
5091 #else
5092     TCGv t0;
5093 
5094     CHK_SV(ctx);
5095     t0 = tcg_temp_new();
5096     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5097     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5098 #endif /* defined(CONFIG_USER_ONLY) */
5099 }
5100 
5101 /* mtsr */
5102 static void gen_mtsr(DisasContext *ctx)
5103 {
5104 #if defined(CONFIG_USER_ONLY)
5105     GEN_PRIV(ctx);
5106 #else
5107     TCGv t0;
5108 
5109     CHK_SV(ctx);
5110     t0 = tcg_constant_tl(SR(ctx->opcode));
5111     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5112 #endif /* defined(CONFIG_USER_ONLY) */
5113 }
5114 
5115 /* mtsrin */
5116 static void gen_mtsrin(DisasContext *ctx)
5117 {
5118 #if defined(CONFIG_USER_ONLY)
5119     GEN_PRIV(ctx);
5120 #else
5121     TCGv t0;
5122     CHK_SV(ctx);
5123 
5124     t0 = tcg_temp_new();
5125     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5126     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5127 #endif /* defined(CONFIG_USER_ONLY) */
5128 }
5129 
5130 #if defined(TARGET_PPC64)
5131 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5132 
5133 /* mfsr */
5134 static void gen_mfsr_64b(DisasContext *ctx)
5135 {
5136 #if defined(CONFIG_USER_ONLY)
5137     GEN_PRIV(ctx);
5138 #else
5139     TCGv t0;
5140 
5141     CHK_SV(ctx);
5142     t0 = tcg_constant_tl(SR(ctx->opcode));
5143     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5144 #endif /* defined(CONFIG_USER_ONLY) */
5145 }
5146 
5147 /* mfsrin */
5148 static void gen_mfsrin_64b(DisasContext *ctx)
5149 {
5150 #if defined(CONFIG_USER_ONLY)
5151     GEN_PRIV(ctx);
5152 #else
5153     TCGv t0;
5154 
5155     CHK_SV(ctx);
5156     t0 = tcg_temp_new();
5157     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5158     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5159 #endif /* defined(CONFIG_USER_ONLY) */
5160 }
5161 
5162 /* mtsr */
5163 static void gen_mtsr_64b(DisasContext *ctx)
5164 {
5165 #if defined(CONFIG_USER_ONLY)
5166     GEN_PRIV(ctx);
5167 #else
5168     TCGv t0;
5169 
5170     CHK_SV(ctx);
5171     t0 = tcg_constant_tl(SR(ctx->opcode));
5172     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5173 #endif /* defined(CONFIG_USER_ONLY) */
5174 }
5175 
5176 /* mtsrin */
5177 static void gen_mtsrin_64b(DisasContext *ctx)
5178 {
5179 #if defined(CONFIG_USER_ONLY)
5180     GEN_PRIV(ctx);
5181 #else
5182     TCGv t0;
5183 
5184     CHK_SV(ctx);
5185     t0 = tcg_temp_new();
5186     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5187     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5188 #endif /* defined(CONFIG_USER_ONLY) */
5189 }
5190 
5191 #endif /* defined(TARGET_PPC64) */
5192 
5193 /***                      Lookaside buffer management                      ***/
5194 /* Optional & supervisor only: */
5195 
5196 /* tlbia */
5197 static void gen_tlbia(DisasContext *ctx)
5198 {
5199 #if defined(CONFIG_USER_ONLY)
5200     GEN_PRIV(ctx);
5201 #else
5202     CHK_HV(ctx);
5203 
5204     gen_helper_tlbia(cpu_env);
5205 #endif  /* defined(CONFIG_USER_ONLY) */
5206 }
5207 
5208 /* tlbsync */
5209 static void gen_tlbsync(DisasContext *ctx)
5210 {
5211 #if defined(CONFIG_USER_ONLY)
5212     GEN_PRIV(ctx);
5213 #else
5214 
5215     if (ctx->gtse) {
5216         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
5217     } else {
5218         CHK_HV(ctx); /* Else hypervisor privileged */
5219     }
5220 
5221     /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
5222     if (ctx->insns_flags & PPC_BOOKE) {
5223         gen_check_tlb_flush(ctx, true);
5224     }
5225 #endif /* defined(CONFIG_USER_ONLY) */
5226 }
5227 
5228 /***                              External control                         ***/
5229 /* Optional: */
5230 
5231 /* eciwx */
5232 static void gen_eciwx(DisasContext *ctx)
5233 {
5234     TCGv t0;
5235     /* Should check EAR[E] ! */
5236     gen_set_access_type(ctx, ACCESS_EXT);
5237     t0 = tcg_temp_new();
5238     gen_addr_reg_index(ctx, t0);
5239     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5240                        DEF_MEMOP(MO_UL | MO_ALIGN));
5241 }
5242 
5243 /* ecowx */
5244 static void gen_ecowx(DisasContext *ctx)
5245 {
5246     TCGv t0;
5247     /* Should check EAR[E] ! */
5248     gen_set_access_type(ctx, ACCESS_EXT);
5249     t0 = tcg_temp_new();
5250     gen_addr_reg_index(ctx, t0);
5251     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5252                        DEF_MEMOP(MO_UL | MO_ALIGN));
5253 }
5254 
5255 /* 602 - 603 - G2 TLB management */
5256 
5257 /* tlbld */
5258 static void gen_tlbld_6xx(DisasContext *ctx)
5259 {
5260 #if defined(CONFIG_USER_ONLY)
5261     GEN_PRIV(ctx);
5262 #else
5263     CHK_SV(ctx);
5264     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5265 #endif /* defined(CONFIG_USER_ONLY) */
5266 }
5267 
5268 /* tlbli */
5269 static void gen_tlbli_6xx(DisasContext *ctx)
5270 {
5271 #if defined(CONFIG_USER_ONLY)
5272     GEN_PRIV(ctx);
5273 #else
5274     CHK_SV(ctx);
5275     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5276 #endif /* defined(CONFIG_USER_ONLY) */
5277 }
5278 
5279 /* BookE specific instructions */
5280 
5281 /* XXX: not implemented on 440? */
5282 static void gen_mfapidi(DisasContext *ctx)
5283 {
5284     /* XXX: TODO */
5285     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5286 }
5287 
5288 /* XXX: not implemented on 440? */
5289 static void gen_tlbiva(DisasContext *ctx)
5290 {
5291 #if defined(CONFIG_USER_ONLY)
5292     GEN_PRIV(ctx);
5293 #else
5294     TCGv t0;
5295 
5296     CHK_SV(ctx);
5297     t0 = tcg_temp_new();
5298     gen_addr_reg_index(ctx, t0);
5299     gen_helper_tlbiva(cpu_env, t0);
5300 #endif /* defined(CONFIG_USER_ONLY) */
5301 }
5302 
5303 /* All 405 MAC instructions are translated here */
5304 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5305                                         int ra, int rb, int rt, int Rc)
5306 {
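    /*
     * opc3 bit 0 selects signed vs. unsigned operands; opc3 bits 2-3
     * pick the halfwords (0x00: both high, 0x04: rA low with rB high,
     * 0x0C: both low).  opc2 bit 2 selects multiply-accumulate and
     * bit 1 negates the product; opc3 bit 4 enables overflow detection
     * and bit 1 saturation.
     */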
5307     TCGv t0, t1;
5308 
5309     t0 = tcg_temp_new();
5310     t1 = tcg_temp_new();
5311 
5312     switch (opc3 & 0x0D) {
5313     case 0x05:
5314         /* macchw    - macchw.    - macchwo   - macchwo.   */
5315         /* macchws   - macchws.   - macchwso  - macchwso.  */
5316         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5317         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5318         /* mulchw - mulchw. */
5319         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5320         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5321         tcg_gen_ext16s_tl(t1, t1);
5322         break;
5323     case 0x04:
5324         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5325         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5326         /* mulchwu - mulchwu. */
5327         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5328         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5329         tcg_gen_ext16u_tl(t1, t1);
5330         break;
5331     case 0x01:
5332         /* machhw    - machhw.    - machhwo   - machhwo.   */
5333         /* machhws   - machhws.   - machhwso  - machhwso.  */
5334         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5335         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5336         /* mulhhw - mulhhw. */
5337         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5338         tcg_gen_ext16s_tl(t0, t0);
5339         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5340         tcg_gen_ext16s_tl(t1, t1);
5341         break;
5342     case 0x00:
5343         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5344         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5345         /* mulhhwu - mulhhwu. */
5346         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5347         tcg_gen_ext16u_tl(t0, t0);
5348         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5349         tcg_gen_ext16u_tl(t1, t1);
5350         break;
5351     case 0x0D:
5352         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5353         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5354         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5355         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5356         /* mullhw - mullhw. */
5357         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5358         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5359         break;
5360     case 0x0C:
5361         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5362         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5363         /* mullhwu - mullhwu. */
5364         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5365         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5366         break;
5367     }
5368     if (opc2 & 0x04) {
5369         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5370         tcg_gen_mul_tl(t1, t0, t1);
5371         if (opc2 & 0x02) {
5372             /* nmultiply-and-accumulate (0x0E) */
5373             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5374         } else {
5375             /* multiply-and-accumulate (0x0C) */
5376             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5377         }
5378 
5379         if (opc3 & 0x12) {
5380             /* Check overflow and/or saturate */
5381             TCGLabel *l1 = gen_new_label();
5382 
5383             if (opc3 & 0x10) {
5384                 /* Start with XER OV disabled, the most likely case */
5385                 tcg_gen_movi_tl(cpu_ov, 0);
5386             }
5387             if (opc3 & 0x01) {
5388                 /* Signed */
5389                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5390                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5391                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5392                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5393                 if (opc3 & 0x02) {
5394                     /* Saturate */
5395                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5396                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5397                 }
5398             } else {
5399                 /* Unsigned */
5400                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5401                 if (opc3 & 0x02) {
5402                     /* Saturate */
5403                     tcg_gen_movi_tl(t0, UINT32_MAX);
5404                 }
5405             }
5406             if (opc3 & 0x10) {
5407                 /* Check overflow */
5408                 tcg_gen_movi_tl(cpu_ov, 1);
5409                 tcg_gen_movi_tl(cpu_so, 1);
5410             }
5411             gen_set_label(l1);
5412             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5413         }
5414     } else {
5415         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5416     }
5417     if (unlikely(Rc != 0)) {
5418         /* Update Rc0 */
5419         gen_set_Rc0(ctx, cpu_gpr[rt]);
5420     }
5421 }
5422 
5423 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5424 static void glue(gen_, name)(DisasContext *ctx)                               \
5425 {                                                                             \
5426     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5427                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5428 }
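/*
 * For illustration: GEN_MAC_HANDLER(macchw, 0x0C, 0x05) expands to a
 * gen_macchw() that simply forwards the rA/rB/rD/Rc fields of the
 * opcode to gen_405_mulladd_insn() with opc2 = 0x0C and opc3 = 0x05.
 */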
5429 
5430 /* macchw    - macchw.    */
5431 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5432 /* macchwo   - macchwo.   */
5433 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5434 /* macchws   - macchws.   */
5435 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5436 /* macchwso  - macchwso.  */
5437 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5438 /* macchwsu  - macchwsu.  */
5439 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5440 /* macchwsuo - macchwsuo. */
5441 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5442 /* macchwu   - macchwu.   */
5443 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5444 /* macchwuo  - macchwuo.  */
5445 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5446 /* machhw    - machhw.    */
5447 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5448 /* machhwo   - machhwo.   */
5449 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5450 /* machhws   - machhws.   */
5451 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5452 /* machhwso  - machhwso.  */
5453 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5454 /* machhwsu  - machhwsu.  */
5455 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5456 /* machhwsuo - machhwsuo. */
5457 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5458 /* machhwu   - machhwu.   */
5459 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5460 /* machhwuo  - machhwuo.  */
5461 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5462 /* maclhw    - maclhw.    */
5463 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5464 /* maclhwo   - maclhwo.   */
5465 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5466 /* maclhws   - maclhws.   */
5467 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5468 /* maclhwso  - maclhwso.  */
5469 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5470 /* maclhwu   - maclhwu.   */
5471 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5472 /* maclhwuo  - maclhwuo.  */
5473 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5474 /* maclhwsu  - maclhwsu.  */
5475 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5476 /* maclhwsuo - maclhwsuo. */
5477 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5478 /* nmacchw   - nmacchw.   */
5479 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5480 /* nmacchwo  - nmacchwo.  */
5481 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5482 /* nmacchws  - nmacchws.  */
5483 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5484 /* nmacchwso - nmacchwso. */
5485 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5486 /* nmachhw   - nmachhw.   */
5487 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5488 /* nmachhwo  - nmachhwo.  */
5489 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5490 /* nmachhws  - nmachhws.  */
5491 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5492 /* nmachhwso - nmachhwso. */
5493 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5494 /* nmaclhw   - nmaclhw.   */
5495 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5496 /* nmaclhwo  - nmaclhwo.  */
5497 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5498 /* nmaclhws  - nmaclhws.  */
5499 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5500 /* nmaclhwso - nmaclhwso. */
5501 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5502 
5503 /* mulchw  - mulchw.  */
5504 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5505 /* mulchwu - mulchwu. */
5506 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5507 /* mulhhw  - mulhhw.  */
5508 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5509 /* mulhhwu - mulhhwu. */
5510 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5511 /* mullhw  - mullhw.  */
5512 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5513 /* mullhwu - mullhwu. */
5514 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5515 
5516 /* mfdcr */
5517 static void gen_mfdcr(DisasContext *ctx)
5518 {
5519 #if defined(CONFIG_USER_ONLY)
5520     GEN_PRIV(ctx);
5521 #else
5522     TCGv dcrn;
5523 
5524     CHK_SV(ctx);
5525     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5526     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5527 #endif /* defined(CONFIG_USER_ONLY) */
5528 }
5529 
5530 /* mtdcr */
5531 static void gen_mtdcr(DisasContext *ctx)
5532 {
5533 #if defined(CONFIG_USER_ONLY)
5534     GEN_PRIV(ctx);
5535 #else
5536     TCGv dcrn;
5537 
5538     CHK_SV(ctx);
5539     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5540     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5541 #endif /* defined(CONFIG_USER_ONLY) */
5542 }
5543 
5544 /* mfdcrx */
5545 /* XXX: not implemented on 440? */
5546 static void gen_mfdcrx(DisasContext *ctx)
5547 {
5548 #if defined(CONFIG_USER_ONLY)
5549     GEN_PRIV(ctx);
5550 #else
5551     CHK_SV(ctx);
5552     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5553                         cpu_gpr[rA(ctx->opcode)]);
5554     /* Note: Rc update flag set leads to undefined state of Rc0 */
5555 #endif /* defined(CONFIG_USER_ONLY) */
5556 }
5557 
5558 /* mtdcrx */
5559 /* XXX: not implemented on 440? */
5560 static void gen_mtdcrx(DisasContext *ctx)
5561 {
5562 #if defined(CONFIG_USER_ONLY)
5563     GEN_PRIV(ctx);
5564 #else
5565     CHK_SV(ctx);
5566     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5567                          cpu_gpr[rS(ctx->opcode)]);
5568     /* Note: Rc update flag set leads to undefined state of Rc0 */
5569 #endif /* defined(CONFIG_USER_ONLY) */
5570 }
5571 
5572 /* dccci */
5573 static void gen_dccci(DisasContext *ctx)
5574 {
5575     CHK_SV(ctx);
5576     /* interpreted as no-op */
5577 }
5578 
5579 /* dcread */
5580 static void gen_dcread(DisasContext *ctx)
5581 {
5582 #if defined(CONFIG_USER_ONLY)
5583     GEN_PRIV(ctx);
5584 #else
5585     TCGv EA, val;
5586 
5587     CHK_SV(ctx);
5588     gen_set_access_type(ctx, ACCESS_CACHE);
5589     EA = tcg_temp_new();
5590     gen_addr_reg_index(ctx, EA);
5591     val = tcg_temp_new();
5592     gen_qemu_ld32u(ctx, val, EA);
5593     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5594 #endif /* defined(CONFIG_USER_ONLY) */
5595 }
5596 
5597 /* icbt */
5598 static void gen_icbt_40x(DisasContext *ctx)
5599 {
5600     /*
5601      * interpreted as no-op
5602      * XXX: specification says this is treated as a load by the MMU but
5603      *      does not generate any exception
5604      */
5605 }
5606 
5607 /* iccci */
5608 static void gen_iccci(DisasContext *ctx)
5609 {
5610     CHK_SV(ctx);
5611     /* interpreted as no-op */
5612 }
5613 
5614 /* icread */
5615 static void gen_icread(DisasContext *ctx)
5616 {
5617     CHK_SV(ctx);
5618     /* interpreted as no-op */
5619 }
5620 
5621 /* rfci (supervisor only) */
5622 static void gen_rfci_40x(DisasContext *ctx)
5623 {
5624 #if defined(CONFIG_USER_ONLY)
5625     GEN_PRIV(ctx);
5626 #else
5627     CHK_SV(ctx);
5628     /* Restore CPU state */
5629     gen_helper_40x_rfci(cpu_env);
5630     ctx->base.is_jmp = DISAS_EXIT;
5631 #endif /* defined(CONFIG_USER_ONLY) */
5632 }
5633 
5634 static void gen_rfci(DisasContext *ctx)
5635 {
5636 #if defined(CONFIG_USER_ONLY)
5637     GEN_PRIV(ctx);
5638 #else
5639     CHK_SV(ctx);
5640     /* Restore CPU state */
5641     gen_helper_rfci(cpu_env);
5642     ctx->base.is_jmp = DISAS_EXIT;
5643 #endif /* defined(CONFIG_USER_ONLY) */
5644 }
5645 
5646 /* BookE specific */
5647 
5648 /* XXX: not implemented on 440? */
5649 static void gen_rfdi(DisasContext *ctx)
5650 {
5651 #if defined(CONFIG_USER_ONLY)
5652     GEN_PRIV(ctx);
5653 #else
5654     CHK_SV(ctx);
5655     /* Restore CPU state */
5656     gen_helper_rfdi(cpu_env);
5657     ctx->base.is_jmp = DISAS_EXIT;
5658 #endif /* defined(CONFIG_USER_ONLY) */
5659 }
5660 
5661 /* XXX: not implemented on 440? */
5662 static void gen_rfmci(DisasContext *ctx)
5663 {
5664 #if defined(CONFIG_USER_ONLY)
5665     GEN_PRIV(ctx);
5666 #else
5667     CHK_SV(ctx);
5668     /* Restore CPU state */
5669     gen_helper_rfmci(cpu_env);
5670     ctx->base.is_jmp = DISAS_EXIT;
5671 #endif /* defined(CONFIG_USER_ONLY) */
5672 }
5673 
5674 /* TLB management - PowerPC 405 implementation */
5675 
5676 /* tlbre */
5677 static void gen_tlbre_40x(DisasContext *ctx)
5678 {
5679 #if defined(CONFIG_USER_ONLY)
5680     GEN_PRIV(ctx);
5681 #else
5682     CHK_SV(ctx);
5683     switch (rB(ctx->opcode)) {
5684     case 0:
5685         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5686                                 cpu_gpr[rA(ctx->opcode)]);
5687         break;
5688     case 1:
5689         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5690                                 cpu_gpr[rA(ctx->opcode)]);
5691         break;
5692     default:
5693         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5694         break;
5695     }
5696 #endif /* defined(CONFIG_USER_ONLY) */
5697 }
5698 
5699 /* tlbsx - tlbsx. */
5700 static void gen_tlbsx_40x(DisasContext *ctx)
5701 {
5702 #if defined(CONFIG_USER_ONLY)
5703     GEN_PRIV(ctx);
5704 #else
5705     TCGv t0;
5706 
5707     CHK_SV(ctx);
5708     t0 = tcg_temp_new();
5709     gen_addr_reg_index(ctx, t0);
5710     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5711     if (Rc(ctx->opcode)) {
5712         TCGLabel *l1 = gen_new_label();
5713         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5714         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5715         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5716         gen_set_label(l1);
5717     }
5718 #endif /* defined(CONFIG_USER_ONLY) */
5719 }
5720 
5721 /* tlbwe */
5722 static void gen_tlbwe_40x(DisasContext *ctx)
5723 {
5724 #if defined(CONFIG_USER_ONLY)
5725     GEN_PRIV(ctx);
5726 #else
5727     CHK_SV(ctx);
5728 
5729     switch (rB(ctx->opcode)) {
5730     case 0:
5731         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
5732                                 cpu_gpr[rS(ctx->opcode)]);
5733         break;
5734     case 1:
5735         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
5736                                 cpu_gpr[rS(ctx->opcode)]);
5737         break;
5738     default:
5739         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5740         break;
5741     }
5742 #endif /* defined(CONFIG_USER_ONLY) */
5743 }
5744 
5745 /* TLB management - PowerPC 440 implementation */
5746 
5747 /* tlbre */
5748 static void gen_tlbre_440(DisasContext *ctx)
5749 {
5750 #if defined(CONFIG_USER_ONLY)
5751     GEN_PRIV(ctx);
5752 #else
5753     CHK_SV(ctx);
5754 
5755     switch (rB(ctx->opcode)) {
5756     case 0:
5757     case 1:
5758     case 2:
5759         {
5760             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5761             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
5762                                  t0, cpu_gpr[rA(ctx->opcode)]);
5763         }
5764         break;
5765     default:
5766         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5767         break;
5768     }
5769 #endif /* defined(CONFIG_USER_ONLY) */
5770 }
5771 
5772 /* tlbsx - tlbsx. */
5773 static void gen_tlbsx_440(DisasContext *ctx)
5774 {
5775 #if defined(CONFIG_USER_ONLY)
5776     GEN_PRIV(ctx);
5777 #else
5778     TCGv t0;
5779 
5780     CHK_SV(ctx);
5781     t0 = tcg_temp_new();
5782     gen_addr_reg_index(ctx, t0);
5783     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5784     if (Rc(ctx->opcode)) {
5785         TCGLabel *l1 = gen_new_label();
5786         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5787         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5788         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5789         gen_set_label(l1);
5790     }
5791 #endif /* defined(CONFIG_USER_ONLY) */
5792 }
5793 
5794 /* tlbwe */
5795 static void gen_tlbwe_440(DisasContext *ctx)
5796 {
5797 #if defined(CONFIG_USER_ONLY)
5798     GEN_PRIV(ctx);
5799 #else
5800     CHK_SV(ctx);
5801     switch (rB(ctx->opcode)) {
5802     case 0:
5803     case 1:
5804     case 2:
5805         {
5806             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5807             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
5808                                  cpu_gpr[rS(ctx->opcode)]);
5809         }
5810         break;
5811     default:
5812         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5813         break;
5814     }
5815 #endif /* defined(CONFIG_USER_ONLY) */
5816 }
5817 
5818 /* TLB management - PowerPC BookE 2.06 implementation */
5819 
5820 /* tlbre */
5821 static void gen_tlbre_booke206(DisasContext *ctx)
5822 {
5823 #if defined(CONFIG_USER_ONLY)
5824     GEN_PRIV(ctx);
5825 #else
5826     CHK_SV(ctx);
5827     gen_helper_booke206_tlbre(cpu_env);
5828 #endif /* defined(CONFIG_USER_ONLY) */
5829 }
5830 
5831 /* tlbsx - tlbsx. */
5832 static void gen_tlbsx_booke206(DisasContext *ctx)
5833 {
5834 #if defined(CONFIG_USER_ONLY)
5835     GEN_PRIV(ctx);
5836 #else
5837     TCGv t0;
5838 
5839     CHK_SV(ctx);
5840     if (rA(ctx->opcode)) {
5841         t0 = tcg_temp_new();
5842         tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5843     } else {
5844         t0 = cpu_gpr[rB(ctx->opcode)];
5845     }
5846     gen_helper_booke206_tlbsx(cpu_env, t0);
5847 #endif /* defined(CONFIG_USER_ONLY) */
5848 }
5849 
5850 /* tlbwe */
5851 static void gen_tlbwe_booke206(DisasContext *ctx)
5852 {
5853 #if defined(CONFIG_USER_ONLY)
5854     GEN_PRIV(ctx);
5855 #else
5856     CHK_SV(ctx);
5857     gen_helper_booke206_tlbwe(cpu_env);
5858 #endif /* defined(CONFIG_USER_ONLY) */
5859 }
5860 
5861 static void gen_tlbivax_booke206(DisasContext *ctx)
5862 {
5863 #if defined(CONFIG_USER_ONLY)
5864     GEN_PRIV(ctx);
5865 #else
5866     TCGv t0;
5867 
5868     CHK_SV(ctx);
5869     t0 = tcg_temp_new();
5870     gen_addr_reg_index(ctx, t0);
5871     gen_helper_booke206_tlbivax(cpu_env, t0);
5872 #endif /* defined(CONFIG_USER_ONLY) */
5873 }
5874 
5875 static void gen_tlbilx_booke206(DisasContext *ctx)
5876 {
5877 #if defined(CONFIG_USER_ONLY)
5878     GEN_PRIV(ctx);
5879 #else
5880     TCGv t0;
5881 
5882     CHK_SV(ctx);
5883     t0 = tcg_temp_new();
5884     gen_addr_reg_index(ctx, t0);
5885 
5886     switch ((ctx->opcode >> 21) & 0x3) {
5887     case 0:
5888         gen_helper_booke206_tlbilx0(cpu_env, t0);
5889         break;
5890     case 1:
5891         gen_helper_booke206_tlbilx1(cpu_env, t0);
5892         break;
5893     case 3:
5894         gen_helper_booke206_tlbilx3(cpu_env, t0);
5895         break;
5896     default:
5897         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5898         break;
5899     }
5900 #endif /* defined(CONFIG_USER_ONLY) */
5901 }
5902 
5903 /* wrtee */
5904 static void gen_wrtee(DisasContext *ctx)
5905 {
5906 #if defined(CONFIG_USER_ONLY)
5907     GEN_PRIV(ctx);
5908 #else
5909     TCGv t0;
5910 
5911     CHK_SV(ctx);
5912     t0 = tcg_temp_new();
5913     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5914     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5915     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5916     gen_ppc_maybe_interrupt(ctx);
5917     /*
5918      * Stop translation to have a chance to raise an exception if we
5919      * just set msr_ee to 1
5920      */
5921     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5922 #endif /* defined(CONFIG_USER_ONLY) */
5923 }
5924 
5925 /* wrteei */
5926 static void gen_wrteei(DisasContext *ctx)
5927 {
5928 #if defined(CONFIG_USER_ONLY)
5929     GEN_PRIV(ctx);
5930 #else
5931     CHK_SV(ctx);
5932     if (ctx->opcode & 0x00008000) {
5933         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5934         gen_ppc_maybe_interrupt(ctx);
5935         /* Stop translation to have a chance to raise an exception */
5936         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5937     } else {
5938         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5939     }
5940 #endif /* defined(CONFIG_USER_ONLY) */
5941 }
5942 
5943 /* PowerPC 440 specific instructions */
5944 
5945 /* dlmzb */
5946 static void gen_dlmzb(DisasContext *ctx)
5947 {
5948     TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5949     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
5950                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5951 }
5952 
5953 /* mbar replaces eieio on 440 */
5954 static void gen_mbar(DisasContext *ctx)
5955 {
5956     /* interpreted as no-op */
5957 }
5958 
5959 /* msync replaces sync on 440 */
5960 static void gen_msync_4xx(DisasContext *ctx)
5961 {
5962     /* Only e500 seems to treat reserved bits as invalid */
5963     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
5964         (ctx->opcode & 0x03FFF801)) {
5965         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5966     }
5967     /* otherwise interpreted as no-op */
5968 }
5969 
5970 /* icbt */
5971 static void gen_icbt_440(DisasContext *ctx)
5972 {
5973     /*
5974      * interpreted as no-op
5975      * XXX: specification says this is treated as a load by the MMU but
5976      *      does not generate any exception
5977      */
5978 }
5979 
5980 #if defined(TARGET_PPC64)
5981 static void gen_maddld(DisasContext *ctx)
5982 {
5983     TCGv_i64 t1 = tcg_temp_new_i64();
5984 
5985     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5986     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
5987 }
5988 
5989 /* maddhd maddhdu */
5990 static void gen_maddhd_maddhdu(DisasContext *ctx)
5991 {
5992     TCGv_i64 lo = tcg_temp_new_i64();
5993     TCGv_i64 hi = tcg_temp_new_i64();
5994     TCGv_i64 t1 = tcg_temp_new_i64();
5995 
5996     if (Rc(ctx->opcode)) {
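    /*
     * Rc() here reads the low bit of the extended opcode, which
     * distinguishes maddhdu (unsigned multiply) from maddhd (signed).
     */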
5997         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
5998                           cpu_gpr[rB(ctx->opcode)]);
5999         tcg_gen_movi_i64(t1, 0);
6000     } else {
6001         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6002                           cpu_gpr[rB(ctx->opcode)]);
6003         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6004     }
6005     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6006                      cpu_gpr[rC(ctx->opcode)], t1);
6007 }
6008 #endif /* defined(TARGET_PPC64) */
6009 
6010 static void gen_tbegin(DisasContext *ctx)
6011 {
6012     if (unlikely(!ctx->tm_enabled)) {
6013         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6014         return;
6015     }
6016     gen_helper_tbegin(cpu_env);
6017 }
6018 
6019 #define GEN_TM_NOOP(name)                                      \
6020 static inline void gen_##name(DisasContext *ctx)               \
6021 {                                                              \
6022     if (unlikely(!ctx->tm_enabled)) {                          \
6023         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6024         return;                                                \
6025     }                                                          \
6026     /*                                                         \
6027      * Because tbegin always fails in QEMU, these user         \
6028      * space instructions all have a simple implementation:    \
6029      *                                                         \
6030      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6031      *           = 0b0 || 0b00    || 0b0                       \
6032      */                                                        \
6033     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6034 }
6035 
6036 GEN_TM_NOOP(tend);
6037 GEN_TM_NOOP(tabort);
6038 GEN_TM_NOOP(tabortwc);
6039 GEN_TM_NOOP(tabortwci);
6040 GEN_TM_NOOP(tabortdc);
6041 GEN_TM_NOOP(tabortdci);
6042 GEN_TM_NOOP(tsr);
6043 
6044 static inline void gen_cp_abort(DisasContext *ctx)
6045 {
6046     /* Do Nothing */
6047 }
6048 
6049 #define GEN_CP_PASTE_NOOP(name)                           \
6050 static inline void gen_##name(DisasContext *ctx)          \
6051 {                                                         \
6052     /*                                                    \
6053      * Generate invalid exception until we have an        \
6054      * implementation of the copy paste facility          \
6055      */                                                   \
6056     gen_invalid(ctx);                                     \
6057 }
6058 
6059 GEN_CP_PASTE_NOOP(copy)
6060 GEN_CP_PASTE_NOOP(paste)
6061 
6062 static void gen_tcheck(DisasContext *ctx)
6063 {
6064     if (unlikely(!ctx->tm_enabled)) {
6065         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6066         return;
6067     }
6068     /*
6069      * Because tbegin always fails, the tcheck implementation is
6070      * simple:
6071      *
6072      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6073      *         = 0b1 || 0b00 || 0b0
6074      */
6075     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6076 }
6077 
6078 #if defined(CONFIG_USER_ONLY)
6079 #define GEN_TM_PRIV_NOOP(name)                                 \
6080 static inline void gen_##name(DisasContext *ctx)               \
6081 {                                                              \
6082     gen_priv_opc(ctx);                                         \
6083 }
6084 
6085 #else
6086 
6087 #define GEN_TM_PRIV_NOOP(name)                                 \
6088 static inline void gen_##name(DisasContext *ctx)               \
6089 {                                                              \
6090     CHK_SV(ctx);                                               \
6091     if (unlikely(!ctx->tm_enabled)) {                          \
6092         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6093         return;                                                \
6094     }                                                          \
6095     /*                                                         \
6096      * Because tbegin always fails, the implementation is      \
6097      * simple:                                                 \
6098      *                                                         \
6099      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6100      *         = 0b0 || 0b00 || 0b0                            \
6101      */                                                        \
6102     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6103 }
6104 
6105 #endif
6106 
6107 GEN_TM_PRIV_NOOP(treclaim);
6108 GEN_TM_PRIV_NOOP(trechkpt);
6109 
6110 static inline void get_fpr(TCGv_i64 dst, int regno)
6111 {
6112     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6113 }
6114 
6115 static inline void set_fpr(int regno, TCGv_i64 src)
6116 {
6117     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6118     /*
6119      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
6120      * corresponding to the target FPR was undefined. However,
6121      * most (if not all) real hardware set the result to 0.
6122      * Starting at ISA v3.1, the result for doubleword 1 is now defined
6123      * to be 0.
6124      */
6125     tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
6126 }
6127 
6128 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6129 {
6130     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6131 }
6132 
6133 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6134 {
6135     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6136 }
6137 
6138 /*
6139  * Helpers for decodetree used by !function for decoding arguments.
6140  */
6141 static int times_2(DisasContext *ctx, int x)
6142 {
6143     return x * 2;
6144 }
6145 
6146 static int times_4(DisasContext *ctx, int x)
6147 {
6148     return x * 4;
6149 }
6150 
6151 static int times_16(DisasContext *ctx, int x)
6152 {
6153     return x * 16;
6154 }
6155 
6156 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6157 {
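    /* Composes a negative, doubleword-aligned EA constant: -512 + 8 * x. */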
6158     return deposit64(0xfffffffffffffe00, 3, 6, x);
6159 }
6160 
6161 /*
6162  * Helpers for trans_* functions to check for specific insns flags.
6163  * Use token pasting to ensure that we use the proper flag with the
6164  * proper variable.
6165  */
6166 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6167     do {                                                \
6168         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6169             return false;                               \
6170         }                                               \
6171     } while (0)
6172 
6173 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6174     do {                                                \
6175         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6176             return false;                               \
6177         }                                               \
6178     } while (0)
6179 
6180 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6181 #if TARGET_LONG_BITS == 32
6182 # define REQUIRE_64BIT(CTX)  return false
6183 #else
6184 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6185 #endif
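/*
 * For illustration: a 64-bit-only trans_* function can start with
 * REQUIRE_64BIT(ctx); on a 32-bit target this expands to "return false",
 * so the remainder of the body is dead code the compiler can drop.
 */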
6186 
6187 #define REQUIRE_VECTOR(CTX)                             \
6188     do {                                                \
6189         if (unlikely(!(CTX)->altivec_enabled)) {        \
6190             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6191             return true;                                \
6192         }                                               \
6193     } while (0)
6194 
6195 #define REQUIRE_VSX(CTX)                                \
6196     do {                                                \
6197         if (unlikely(!(CTX)->vsx_enabled)) {            \
6198             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6199             return true;                                \
6200         }                                               \
6201     } while (0)
6202 
6203 #define REQUIRE_FPU(ctx)                                \
6204     do {                                                \
6205         if (unlikely(!(ctx)->fpu_enabled)) {            \
6206             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6207             return true;                                \
6208         }                                               \
6209     } while (0)
6210 
6211 #if !defined(CONFIG_USER_ONLY)
6212 #define REQUIRE_SV(CTX)             \
6213     do {                            \
6214         if (unlikely((CTX)->pr)) {  \
6215             gen_priv_opc(CTX);      \
6216             return true;            \
6217         }                           \
6218     } while (0)
6219 
6220 #define REQUIRE_HV(CTX)                             \
6221     do {                                            \
6222         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
6223             gen_priv_opc(CTX);                      \
6224             return true;                            \
6225         }                                           \
6226     } while (0)
6227 #else
6228 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6229 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6230 #endif
6231 
6232 /*
6233  * Helpers for implementing sets of trans_* functions.
6234  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6235  */
6236 #define TRANS(NAME, FUNC, ...) \
6237     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6238     { return FUNC(ctx, a, __VA_ARGS__); }
6239 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6240     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6241     {                                                          \
6242         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6243         return FUNC(ctx, a, __VA_ARGS__);                      \
6244     }
6245 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6246     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6247     {                                                          \
6248         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6249         return FUNC(ctx, a, __VA_ARGS__);                      \
6250     }
6251 
6252 #define TRANS64(NAME, FUNC, ...) \
6253     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6254     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6255 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6256     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6257     {                                                          \
6258         REQUIRE_64BIT(ctx);                                    \
6259         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6260         return FUNC(ctx, a, __VA_ARGS__);                      \
6261     }
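/*
 * For illustration with hypothetical names: TRANS_FLAGS2(ISA300, FOO,
 * do_foo, true) defines trans_FOO(), which returns false unless
 * PPC2_ISA300 is present and otherwise tail-calls do_foo(ctx, a, true).
 */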
6262 
6263 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6264 
6265 
6266 #include "decode-insn32.c.inc"
6267 #include "decode-insn64.c.inc"
6268 #include "power8-pmu-regs.c.inc"
6269 
6270 /*
6271  * Incorporate CIA into the constant when R=1.
6272  * Validate that when R=1, RA=0.
6273  */
6274 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6275 {
6276     d->rt = a->rt;
6277     d->ra = a->ra;
6278     d->si = a->si;
6279     if (a->r) {
6280         if (unlikely(a->ra != 0)) {
6281             gen_invalid(ctx);
6282             return false;
6283         }
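        /*
         * R=1 means the displacement is PC-relative: fold the current
         * instruction address into si so the rest of the D-form path
         * can treat it as an ordinary constant.
         */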
6284         d->si += ctx->cia;
6285     }
6286     return true;
6287 }
6288 
6289 #include "translate/fixedpoint-impl.c.inc"
6290 
6291 #include "translate/fp-impl.c.inc"
6292 
6293 #include "translate/vmx-impl.c.inc"
6294 
6295 #include "translate/vsx-impl.c.inc"
6296 
6297 #include "translate/dfp-impl.c.inc"
6298 
6299 #include "translate/spe-impl.c.inc"
6300 
6301 #include "translate/branch-impl.c.inc"
6302 
6303 #include "translate/processor-ctrl-impl.c.inc"
6304 
6305 #include "translate/storage-ctrl-impl.c.inc"
6306 
6307 /* Handles lfdp */
6308 static void gen_dform39(DisasContext *ctx)
6309 {
6310     if ((ctx->opcode & 0x3) == 0) {
6311         if (ctx->insns_flags2 & PPC2_ISA205) {
6312             return gen_lfdp(ctx);
6313         }
6314     }
6315     return gen_invalid(ctx);
6316 }
6317 
6318 /* Handles stfdp */
6319 static void gen_dform3D(DisasContext *ctx)
6320 {
6321     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6322         /* stfdp */
6323         if (ctx->insns_flags2 & PPC2_ISA205) {
6324             return gen_stfdp(ctx);
6325         }
6326     }
6327     return gen_invalid(ctx);
6328 }
6329 
6330 #if defined(TARGET_PPC64)
6331 /* brd */
6332 static void gen_brd(DisasContext *ctx)
6333 {
6334     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6335 }
6336 
6337 /* brw */
6338 static void gen_brw(DisasContext *ctx)
6339 {
6340     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6341     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6343 }
6344 
6345 /* brh */
6346 static void gen_brh(DisasContext *ctx)
6347 {
6348     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6349     TCGv_i64 t1 = tcg_temp_new_i64();
6350     TCGv_i64 t2 = tcg_temp_new_i64();
6351 
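    /*
     * Byte-swap each halfword: t2 holds the high bytes shifted down,
     * t1 the low bytes shifted up, and the OR merges the two.
     */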
6352     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6353     tcg_gen_and_i64(t2, t1, mask);
6354     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6355     tcg_gen_shli_i64(t1, t1, 8);
6356     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6357 }
6358 #endif
6359 
6360 static opcode_t opcodes[] = {
6361 #if defined(TARGET_PPC64)
6362 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6363 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6364 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6365 #endif
6366 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6367 #if defined(TARGET_PPC64)
6368 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6369 #endif
6370 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6371 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6372 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6373 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6374 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6375 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6376 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6377 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6378 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6379 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6380 #if defined(TARGET_PPC64)
6381 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6382 #endif
6383 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6384 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6385 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6386 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6387 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6388 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6389 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6390 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6391 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6392 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6393 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6394 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6395 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6396 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6397 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6398 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6399 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6400 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6401 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6402 #if defined(TARGET_PPC64)
6403 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6404 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6405 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6406 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6407 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6408 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6409 #endif
6410 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6411 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6412 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6413 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6414 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6415 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6416 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6417 #if defined(TARGET_PPC64)
6418 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6419 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6420 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6421 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6422 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6423 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6424                PPC_NONE, PPC2_ISA300),
6425 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6426                PPC_NONE, PPC2_ISA300),
6427 #endif
6428 /* handles lfdp, lxsd, lxssp */
6429 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6430 /* handles stfdp, stxsd, stxssp */
6431 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6432 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6433 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6434 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6435 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6436 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6437 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6438 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6439 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6440 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6441 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6442 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6443 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6444 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6445 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6446 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6447 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6448 #if defined(TARGET_PPC64)
6449 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6450 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6451 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6452 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6453 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6454 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6455 #endif
6456 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6457 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6458 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6459 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6460 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6461 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6462 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6463 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6464 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6465 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6466 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6467 #if defined(TARGET_PPC64)
6468 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6469 #if !defined(CONFIG_USER_ONLY)
6470 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6471 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6472 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6473 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6474 #endif
6475 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6476 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6477 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6478 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6479 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6480 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6481 #endif
6482 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6483 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6484 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6485 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6486 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6487 #if defined(TARGET_PPC64)
6488 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6489 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6490 #endif
6491 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6492 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6493 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6494 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6495 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6496 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6497 #if defined(TARGET_PPC64)
6498 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6499 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6500 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6501 #endif
6502 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6503 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6504 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6505 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6506 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6507 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6508 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6509 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6510 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6511 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6512 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6513 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6514 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6515 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6516 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6517 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6518 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6519 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6520 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6521 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6522 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6523 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6524 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6525 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6526 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6527 #if defined(TARGET_PPC64)
6528 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6529 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6530              PPC_SEGMENT_64B),
6531 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6532 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6533              PPC_SEGMENT_64B),
6534 #endif
6535 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6536 /*
6537  * XXX These instructions will need to be handled differently for
6538  * different ISA versions.
6539  */
6540 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6541 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6542 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6543 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6544 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6545 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6546 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6547 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6548 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6549 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6550 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6551 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6552 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6553 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6554 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6555 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6556 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6557 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6558 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6559 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6560 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6561 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6562 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6563 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6564 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6565 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6566 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6567                PPC_NONE, PPC2_BOOKE206),
6568 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6569                PPC_NONE, PPC2_BOOKE206),
6570 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6571                PPC_NONE, PPC2_BOOKE206),
6572 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6573                PPC_NONE, PPC2_BOOKE206),
6574 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6575                PPC_NONE, PPC2_BOOKE206),
6576 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6577 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6578 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6579 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6580               PPC_BOOKE, PPC2_BOOKE206),
6581 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6582 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6583                PPC_BOOKE, PPC2_BOOKE206),
6584 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6585              PPC_440_SPEC),
6586 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6587 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6588 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6589 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6590 #if defined(TARGET_PPC64)
6591 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6592               PPC2_ISA300),
6593 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6594 #endif
6595 
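/*
 * From here on, the helper macros that earlier in this file generated the
 * translation functions are redefined so that the same invocations expand
 * into opcode-table entries instead.
 */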
6596 #undef GEN_INT_ARITH_ADD
6597 #undef GEN_INT_ARITH_ADD_CONST
6598 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6599 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6600 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6601                                 add_ca, compute_ca, compute_ov)               \
6602 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6603 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6604 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6605 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6606 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6607 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6608 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6609 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6610 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6611 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6612 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6613 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6614 
6615 #undef GEN_INT_ARITH_DIVW
6616 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6617 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6618 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6619 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6620 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6621 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6622 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6623 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6624 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6625 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6626 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6627 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6628 
6629 #if defined(TARGET_PPC64)
6630 #undef GEN_INT_ARITH_DIVD
6631 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6632 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6633 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6634 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6635 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6636 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6637 
6638 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6639 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6640 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6641 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6642 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6643 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6644 
6645 #undef GEN_INT_ARITH_MUL_HELPER
6646 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6647 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6648 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6649 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6650 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6651 #endif
6652 
6653 #undef GEN_INT_ARITH_SUBF
6654 #undef GEN_INT_ARITH_SUBF_CONST
6655 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6656 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6657 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6658                                 add_ca, compute_ca, compute_ov)               \
6659 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6660 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6661 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6662 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6663 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6664 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6665 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6666 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6667 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6668 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6669 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6670 
6671 #undef GEN_LOGICAL1
6672 #undef GEN_LOGICAL2
6673 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6674 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6675 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6676 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6677 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6678 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6679 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6680 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6681 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6682 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6683 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6684 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6685 #if defined(TARGET_PPC64)
6686 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6687 #endif
6688 
6689 #if defined(TARGET_PPC64)
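/*
 * The MD/MDS-form rotates keep sh[5] and the mb/me split bit in bits that
 * the decoder reads as part of opc2, so each instruction is registered
 * under every opc2 value it can take: four entries for the MD forms, two
 * for rldcl/rldcr.
 */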
6690 #undef GEN_PPC64_R2
6691 #undef GEN_PPC64_R4
6692 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
6693 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6694 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6695              PPC_64B)
6696 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
6697 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6698 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
6699              PPC_64B),                                                        \
6700 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6701              PPC_64B),                                                        \
6702 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
6703              PPC_64B)
6704 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6705 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6706 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6707 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6708 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6709 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6710 #endif
6711 
6712 #undef GEN_LDX_E
6713 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
6714 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
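/*
 * For example, GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, ...) below produces
 * the table entry for "ldbrx" (opc1 0x1F, opc2 0x14, opc3 0x10).  The ldop
 * and chk arguments are unused in this expansion; they are consumed by the
 * earlier, function-generating definition of this macro.
 */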
6715 
6716 #if defined(TARGET_PPC64)
6717 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6718 
6719 /* HV/P7 and later only */
6720 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6721 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6722 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6723 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6724 #endif
6725 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6726 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6727 
6728 /* External PID based load */
6729 #undef GEN_LDEPX
6730 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6731 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6732               0x00000001, PPC_NONE, PPC2_BOOKE206),
6733 
6734 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6735 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6736 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6737 #if defined(TARGET_PPC64)
6738 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6739 #endif
6740 
6741 #undef GEN_STX_E
6742 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6743 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6744 
6745 #if defined(TARGET_PPC64)
6746 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6747 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6748 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6749 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6750 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6751 #endif
6752 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6753 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6754 
6755 #undef GEN_STEPX
6756 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6757 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6758               0x00000001, PPC_NONE, PPC2_BOOKE206),
6759 
6760 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6761 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6762 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6763 #if defined(TARGET_PPC64)
6764 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6765 #endif
6766 
6767 #undef GEN_CRLOGIC
6768 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6769 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6770 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6771 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6772 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6773 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6774 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6775 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6776 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6777 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6778 
6779 #undef GEN_MAC_HANDLER
6780 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6781 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6782 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6783 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6784 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6785 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6786 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6787 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6788 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6789 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6790 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6791 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6792 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6793 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6794 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6795 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6796 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6797 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6798 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6799 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6800 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6801 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6802 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6803 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6804 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6805 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6806 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6807 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6808 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6809 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6810 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6811 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6812 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6813 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6814 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6815 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6816 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6817 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6818 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6819 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6820 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6821 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6822 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6823 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6824 
6825 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6826                PPC_NONE, PPC2_TM),
6827 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6828                PPC_NONE, PPC2_TM),
6829 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6830                PPC_NONE, PPC2_TM),
6831 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6832                PPC_NONE, PPC2_TM),
6833 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6834                PPC_NONE, PPC2_TM),
6835 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6836                PPC_NONE, PPC2_TM),
6837 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6838                PPC_NONE, PPC2_TM),
6839 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6840                PPC_NONE, PPC2_TM),
6841 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6842                PPC_NONE, PPC2_TM),
6843 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6844                PPC_NONE, PPC2_TM),
6845 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6846                PPC_NONE, PPC2_TM),
6847 
6848 #include "translate/fp-ops.c.inc"
6849 
6850 #include "translate/vmx-ops.c.inc"
6851 
6852 #include "translate/vsx-ops.c.inc"
6853 
6854 #include "translate/spe-ops.c.inc"
6855 };
6856 
6857 /*****************************************************************************/
6858 /* Opcode types */
6859 enum {
6860     PPC_DIRECT   = 0, /* Opcode routine        */
6861     PPC_INDIRECT = 1, /* Indirect opcode table */
6862 };
6863 
6864 #define PPC_OPCODE_MASK 0x3
6865 
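/*
 * Entries in the dispatch tables are either direct pointers to an
 * opc_handler_t or, when PPC_INDIRECT is set in the low bits, tagged
 * pointers to a nested table of PPC_CPU_INDIRECT_OPCODES_LEN entries.
 * ind_table() masks the tag off to recover the nested table pointer.
 */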
6866 static inline int is_indirect_opcode(void *handler)
6867 {
6868     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6869 }
6870 
6871 static inline opc_handler_t **ind_table(void *handler)
6872 {
6873     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6874 }
6875 
6876 /* Opcode table creation */
6878 static void fill_new_table(opc_handler_t **table, int len)
6879 {
6880     int i;
6881 
6882     for (i = 0; i < len; i++) {
6883         table[i] = &invalid_handler;
6884     }
6885 }
6886 
6887 static int create_new_table(opc_handler_t **table, unsigned char idx)
6888 {
6889     opc_handler_t **tmp;
6890 
6891     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6892     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6893     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6894 
6895     return 0;
6896 }
6897 
6898 static int insert_in_table(opc_handler_t **table, unsigned char idx,
6899                             opc_handler_t *handler)
6900 {
6901     if (table[idx] != &invalid_handler) {
6902         return -1;
6903     }
6904     table[idx] = handler;
6905 
6906     return 0;
6907 }
6908 
6909 static int register_direct_insn(opc_handler_t **ppc_opcodes,
6910                                 unsigned char idx, opc_handler_t *handler)
6911 {
6912     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6913         printf("*** ERROR: opcode %02x already assigned in main "
6914                "opcode table\n", idx);
6915         return -1;
6916     }
6917 
6918     return 0;
6919 }
6920 
6921 static int register_ind_in_table(opc_handler_t **table,
6922                                  unsigned char idx1, unsigned char idx2,
6923                                  opc_handler_t *handler)
6924 {
6925     if (table[idx1] == &invalid_handler) {
6926         if (create_new_table(table, idx1) < 0) {
6927             printf("*** ERROR: unable to create indirect table "
6928                    "idx=%02x\n", idx1);
6929             return -1;
6930         }
6931     } else {
6932         if (!is_indirect_opcode(table[idx1])) {
6933             printf("*** ERROR: idx %02x already assigned to a direct "
6934                    "opcode\n", idx1);
6935             return -1;
6936         }
6937     }
6938     if (handler != NULL &&
6939         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6940         printf("*** ERROR: opcode %02x already assigned in "
6941                "opcode table %02x\n", idx2, idx1);
6942         return -1;
6943     }
6944 
6945     return 0;
6946 }
6947 
6948 static int register_ind_insn(opc_handler_t **ppc_opcodes,
6949                              unsigned char idx1, unsigned char idx2,
6950                              opc_handler_t *handler)
6951 {
6952     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6953 }
6954 
6955 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6956                                 unsigned char idx1, unsigned char idx2,
6957                                 unsigned char idx3, opc_handler_t *handler)
6958 {
6959     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6960         printf("*** ERROR: unable to join indirect table idx "
6961                "[%02x-%02x]\n", idx1, idx2);
6962         return -1;
6963     }
6964     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6965                               handler) < 0) {
6966         printf("*** ERROR: unable to insert opcode "
6967                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6968         return -1;
6969     }
6970 
6971     return 0;
6972 }
6973 
6974 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6975                                  unsigned char idx1, unsigned char idx2,
6976                                  unsigned char idx3, unsigned char idx4,
6977                                  opc_handler_t *handler)
6978 {
6979     opc_handler_t **table;
6980 
6981     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6982         printf("*** ERROR: unable to join indirect table idx "
6983                "[%02x-%02x]\n", idx1, idx2);
6984         return -1;
6985     }
6986     table = ind_table(ppc_opcodes[idx1]);
6987     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6988         printf("*** ERROR: unable to join 2nd-level indirect table idx "
6989                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6990         return -1;
6991     }
6992     table = ind_table(table[idx2]);
6993     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6994         printf("*** ERROR: unable to insert opcode "
6995                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6996         return -1;
6997     }
6998     return 0;
6999 }
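
/*
 * Register one opcodes[] entry: an opc2/opc3/opc4 value of 0xFF means the
 * instruction has no sub-opcode at that level, so the handler is inserted
 * into a correspondingly shallower table.
 */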
7000 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7001 {
7002     if (insn->opc2 != 0xFF) {
7003         if (insn->opc3 != 0xFF) {
7004             if (insn->opc4 != 0xFF) {
7005                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7006                                           insn->opc3, insn->opc4,
7007                                           &insn->handler) < 0) {
7008                     return -1;
7009                 }
7010             } else {
7011                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7012                                          insn->opc3, &insn->handler) < 0) {
7013                     return -1;
7014                 }
7015             }
7016         } else {
7017             if (register_ind_insn(ppc_opcodes, insn->opc1,
7018                                   insn->opc2, &insn->handler) < 0) {
7019                 return -1;
7020             }
7021         }
7022     } else {
7023         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7024             return -1;
7025         }
7026     }
7027 
7028     return 0;
7029 }
7030 
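/*
 * Recursively count the valid entries in a table, replacing NULL slots with
 * invalid_handler and freeing indirect sub-tables that turn out to be empty.
 */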
7031 static int test_opcode_table(opc_handler_t **table, int len)
7032 {
7033     int i, count, tmp;
7034 
7035     for (i = 0, count = 0; i < len; i++) {
7036         /* Consistency fixup */
7037         if (table[i] == NULL) {
7038             table[i] = &invalid_handler;
7039         }
7040         if (table[i] != &invalid_handler) {
7041             if (is_indirect_opcode(table[i])) {
7042                 tmp = test_opcode_table(ind_table(table[i]),
7043                     PPC_CPU_INDIRECT_OPCODES_LEN);
7044                 if (tmp == 0) {
7045                     g_free(ind_table(table[i]));
7046                     table[i] = &invalid_handler;
7047                 } else {
7048                     count++;
7049                 }
7050             } else {
7051                 count++;
7052             }
7053         }
7054     }
7055 
7056     return count;
7057 }
7058 
7059 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7060 {
7061     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7062         printf("*** WARNING: no opcode defined !\n");
7063     }
7064 }
7065 
7066 /*****************************************************************************/
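/*
 * Build the per-CPU dispatch tables: register each opcodes[] entry whose
 * type/type2 flags match the CPU class, then prune empty indirect tables.
 */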
7067 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7068 {
7069     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7070     opcode_t *opc;
7071 
7072     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7073     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7074         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7075             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7076             if (register_insn(cpu->opcodes, opc) < 0) {
7077                 error_setg(errp, "ERROR initializing PowerPC instruction "
7078                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7079                            opc->opc3);
7080                 return;
7081             }
7082         }
7083     }
7084     fix_opcode_tables(cpu->opcodes);
7085     fflush(stdout);
7086     fflush(stderr);
7087 }
7088 
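/*
 * Free the indirect sub-tables allocated by create_new_table() (up to three
 * levels deep); direct entries point at the static opcodes[] array and are
 * not freed.
 */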
7089 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7090 {
7091     opc_handler_t **table, **table_2;
7092     int i, j, k;
7093 
7094     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7095         if (cpu->opcodes[i] == &invalid_handler) {
7096             continue;
7097         }
7098         if (is_indirect_opcode(cpu->opcodes[i])) {
7099             table = ind_table(cpu->opcodes[i]);
7100             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7101                 if (table[j] == &invalid_handler) {
7102                     continue;
7103                 }
7104                 if (is_indirect_opcode(table[j])) {
7105                     table_2 = ind_table(table[j]);
7106                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7107                         if (table_2[k] != &invalid_handler &&
7108                             is_indirect_opcode(table_2[k])) {
7109                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7110                                                      ~PPC_INDIRECT));
7111                         }
7112                     }
7113                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7114                                              ~PPC_INDIRECT));
7115                 }
7116             }
7117             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7118                 ~PPC_INDIRECT));
7119         }
7120     }
7121 }
7122 
7123 int ppc_fixup_cpu(PowerPCCPU *cpu)
7124 {
7125     CPUPPCState *env = &cpu->env;
7126 
7127     /*
7128      * TCG doesn't (yet) emulate some groups of instructions that are
7129      * implemented on some otherwise supported CPUs (e.g. VSX and
7130      * decimal floating point instructions on POWER7).  We remove
7131      * unsupported instruction groups from the cpu state's instruction
7132      * masks and hope the guest can cope.  For at least the pseries
7133      * machine, the unavailability of these instructions can be
7134      * advertised to the guest via the device tree.
7135      */
7136     if ((env->insns_flags & ~PPC_TCG_INSNS)
7137         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7138         warn_report("Disabling some instructions which are not "
7139                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7140                     env->insns_flags & ~PPC_TCG_INSNS,
7141                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7142     }
7143     env->insns_flags &= PPC_TCG_INSNS;
7144     env->insns_flags2 &= PPC_TCG_INSNS2;
7145     return 0;
7146 }
7147 
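/*
 * Legacy (pre-decodetree) decode: walk the per-CPU opcode tables by opc1,
 * following indirect entries through opc2/opc3/opc4, check the handler's
 * reserved-bit (inval) mask, then emit code via the handler.
 */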
7148 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7149 {
7150     opc_handler_t **table, *handler;
7151     uint32_t inval;
7152 
7153     ctx->opcode = insn;
7154 
7155     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7156               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7157               ctx->le_mode ? "little" : "big");
7158 
7159     table = cpu->opcodes;
7160     handler = table[opc1(insn)];
7161     if (is_indirect_opcode(handler)) {
7162         table = ind_table(handler);
7163         handler = table[opc2(insn)];
7164         if (is_indirect_opcode(handler)) {
7165             table = ind_table(handler);
7166             handler = table[opc3(insn)];
7167             if (is_indirect_opcode(handler)) {
7168                 table = ind_table(handler);
7169                 handler = table[opc4(insn)];
7170             }
7171         }
7172     }
7173 
7174     /* Is opcode *REALLY* valid? */
7175     if (unlikely(handler->handler == &gen_invalid)) {
7176         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7177                       "%02x - %02x - %02x - %02x (%08x) "
7178                       TARGET_FMT_lx "\n",
7179                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7180                       insn, ctx->cia);
7181         return false;
7182     }
7183 
7184     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7185                  && Rc(insn))) {
7186         inval = handler->inval2;
7187     } else {
7188         inval = handler->inval1;
7189     }
7190 
7191     if (unlikely((insn & inval) != 0)) {
7192         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7193                       "%02x - %02x - %02x - %02x (%08x) "
7194                       TARGET_FMT_lx "\n", insn & inval,
7195                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7196                       insn, ctx->cia);
7197         return false;
7198     }
7199 
7200     handler->handler(ctx);
7201     return true;
7202 }
7203 
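/*
 * Unpack the translation-relevant CPU state that was folded into tb->flags
 * (MSR bits, facility enables, PMU and single-step state) into the
 * DisasContext.
 */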
7204 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7205 {
7206     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7207     CPUPPCState *env = cs->env_ptr;
7208     uint32_t hflags = ctx->base.tb->flags;
7209 
7210     ctx->spr_cb = env->spr_cb;
7211     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7212     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7213     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7214     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7215     ctx->insns_flags = env->insns_flags;
7216     ctx->insns_flags2 = env->insns_flags2;
7217     ctx->access_type = -1;
7218     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7219     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7220     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7221     ctx->flags = env->flags;
7222 #if defined(TARGET_PPC64)
7223     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7224     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7225 #endif
7226     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7227         || env->mmu_model & POWERPC_MMU_64;
7228 
7229     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7230     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7231     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7232     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7233     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7234     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7235     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7236     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7237     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7238     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7239     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7240     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7241 
7242     ctx->singlestep_enabled = 0;
7243     if ((hflags >> HFLAGS_SE) & 1) {
7244         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7245         ctx->base.max_insns = 1;
7246     }
7247     if ((hflags >> HFLAGS_BE) & 1) {
7248         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7249     }
7250 }
7251 
7252 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7253 {
7254 }
7255 
7256 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7257 {
7258     tcg_gen_insn_start(dcbase->pc_next);
7259 }
7260 
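/*
 * On ISA v3.1 CPUs primary opcode 1 introduces a 64-bit prefixed
 * instruction; without the ISA310 flag the check fails and the word is
 * decoded as a normal 32-bit instruction.
 */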
7261 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7262 {
7263     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7264     return opc1(insn) == 1;
7265 }
7266 
7267 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7268 {
7269     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7270     PowerPCCPU *cpu = POWERPC_CPU(cs);
7271     CPUPPCState *env = cs->env_ptr;
7272     target_ulong pc;
7273     uint32_t insn;
7274     bool ok;
7275 
7276     LOG_DISAS("----------------\n");
7277     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7278               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7279 
7280     ctx->cia = pc = ctx->base.pc_next;
7281     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7282     ctx->base.pc_next = pc += 4;
7283 
7284     if (!is_prefix_insn(ctx, insn)) {
7285         ok = (decode_insn32(ctx, insn) ||
7286               decode_legacy(cpu, ctx, insn));
7287     } else if ((pc & 63) == 0) {
7288         /*
7289          * Power v3.1, section 1.9 Exceptions:
7290          * attempt to execute a prefixed instruction that crosses a
7291          * 64-byte address boundary (system alignment error).
7292          */
7293         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7294         ok = true;
7295     } else {
7296         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7297                                              need_byteswap(ctx));
7298         ctx->base.pc_next = pc += 4;
7299         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7300     }
7301     if (!ok) {
7302         gen_invalid(ctx);
7303     }
7304 
7305     /* End the TB when crossing a page boundary. */
7306     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7307         ctx->base.is_jmp = DISAS_TOO_MANY;
7308     }
7309 }
7310 
7311 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7312 {
7313     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7314     DisasJumpType is_jmp = ctx->base.is_jmp;
7315     target_ulong nip = ctx->base.pc_next;
7316 
7317     if (is_jmp == DISAS_NORETURN) {
7318         /* We have already exited the TB. */
7319         return;
7320     }
7321 
7322     /* Honor single stepping. */
7323     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7324         && (nip <= 0x100 || nip > 0xf00)) {
7325         switch (is_jmp) {
7326         case DISAS_TOO_MANY:
7327         case DISAS_EXIT_UPDATE:
7328         case DISAS_CHAIN_UPDATE:
7329             gen_update_nip(ctx, nip);
7330             break;
7331         case DISAS_EXIT:
7332         case DISAS_CHAIN:
7333             break;
7334         default:
7335             g_assert_not_reached();
7336         }
7337 
7338         gen_debug_exception(ctx);
7339         return;
7340     }
7341 
7342     switch (is_jmp) {
7343     case DISAS_TOO_MANY:
7344         if (use_goto_tb(ctx, nip)) {
7345             pmu_count_insns(ctx);
7346             tcg_gen_goto_tb(0);
7347             gen_update_nip(ctx, nip);
7348             tcg_gen_exit_tb(ctx->base.tb, 0);
7349             break;
7350         }
7351         /* fall through */
7352     case DISAS_CHAIN_UPDATE:
7353         gen_update_nip(ctx, nip);
7354         /* fall through */
7355     case DISAS_CHAIN:
7356         /*
7357          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7358          * CF_NO_GOTO_PTR is set. Count insns now.
7359          */
7360         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
7361             pmu_count_insns(ctx);
7362         }
7363 
7364         tcg_gen_lookup_and_goto_ptr();
7365         break;
7366 
7367     case DISAS_EXIT_UPDATE:
7368         gen_update_nip(ctx, nip);
7369         /* fall through */
7370     case DISAS_EXIT:
7371         pmu_count_insns(ctx);
7372         tcg_gen_exit_tb(NULL, 0);
7373         break;
7374 
7375     default:
7376         g_assert_not_reached();
7377     }
7378 }
7379 
7380 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7381                              CPUState *cs, FILE *logfile)
7382 {
7383     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7384     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7385 }
7386 
7387 static const TranslatorOps ppc_tr_ops = {
7388     .init_disas_context = ppc_tr_init_disas_context,
7389     .tb_start           = ppc_tr_tb_start,
7390     .insn_start         = ppc_tr_insn_start,
7391     .translate_insn     = ppc_tr_translate_insn,
7392     .tb_stop            = ppc_tr_tb_stop,
7393     .disas_log          = ppc_tr_disas_log,
7394 };
7395 
7396 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
7397                            target_ulong pc, void *host_pc)
7398 {
7399     DisasContext ctx;
7400 
7401     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
7402 }
7403