/* xref: /qemu/target/ppc/translate/vsx-impl.c.inc (revision 29b62a10) */
/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

static inline TCGv_ptr gen_acc_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, acc_full_offset(reg));
    return r;
}

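/*
 * Scalar VSX loads: fetch one value with the given gen_qemu_* memory
 * operation and place it in the upper doubleword of VSR xT. Only the
 * upper doubleword is written here (see the NOTE below).
 */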
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

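/*
 * lxvd2x loads the two doubleword elements from EA and EA + 8; the
 * element order is fixed (EA supplies the upper doubleword), while
 * gen_qemu_ld64_i64 applies the current byte order within each
 * doubleword.
 */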
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

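/*
 * lxvwsx loads one word and splats it across all four word elements
 * of the target via a gvec dup. Target numbers 32..63 name VMX
 * registers, so the facility check depends on which half of the
 * register file xT selects.
 */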
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

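/*
 * Swap the bytes within each of the eight halfwords held in the
 * inh:inl register pair; used to fix up element order after the
 * big-endian doubleword accesses of lxvh8x/stxvh8x in little-endian
 * mode.
 */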
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
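/*
 * Load/store vector with length: the EA is taken from rA alone
 * (gen_addr_register) and the byte count to transfer is derived from
 * rB inside the helper, so each of these expands to a single helper
 * call.
 */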
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

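/*
 * Scalar VSX stores: the mirror image of VSX_LOAD_SCALAR above,
 * storing the upper doubleword of VSR xS with the given memory
 * operation.
 */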
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

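/*
 * Moves between GPRs and VSRs. VSRs 0..31 overlay the FPRs and VSRs
 * 32..63 overlay the VMX registers, so the facility check depends on
 * which half of the register file the VSR number selects.
 */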
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif

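/*
 * Sign, exponent and fraction masks for the sign-manipulation and
 * test-data-class operations below. The _SP variants repeat the mask
 * in both 32-bit words, covering the two single-precision elements
 * held in each 64-bit register half.
 */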
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull
#define EXP_MASK_DP  0x7FF0000000000000ull
#define EXP_MASK_SP  0x7F8000007F800000ull
#define FRC_MASK_DP (~(SGN_MASK_DP | EXP_MASK_DP))
#define FRC_MASK_SP (~(SGN_MASK_SP | EXP_MASK_SP))

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

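/*
 * Expand fixed-immediate i64 variants of the sign-bit and/or/xor
 * operations; these serve as the .fni8 callbacks for the gvec
 * expansions below.
 */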
#define TCG_OP_IMM_i64(FUNC, OP, IMM)                           \
    static void FUNC(TCGv_i64 t, TCGv_i64 b)                    \
    {                                                           \
        OP(t, b, IMM);                                          \
    }

TCG_OP_IMM_i64(do_xvabssp_i64, tcg_gen_andi_i64, ~SGN_MASK_SP)
TCG_OP_IMM_i64(do_xvnabssp_i64, tcg_gen_ori_i64, SGN_MASK_SP)
TCG_OP_IMM_i64(do_xvnegsp_i64, tcg_gen_xori_i64, SGN_MASK_SP)
TCG_OP_IMM_i64(do_xvabsdp_i64, tcg_gen_andi_i64, ~SGN_MASK_DP)
TCG_OP_IMM_i64(do_xvnabsdp_i64, tcg_gen_ori_i64, SGN_MASK_DP)
TCG_OP_IMM_i64(do_xvnegdp_i64, tcg_gen_xori_i64, SGN_MASK_DP)
#undef TCG_OP_IMM_i64

static void xv_msb_op1(unsigned vece, TCGv_vec t, TCGv_vec b,
                 void (*tcg_gen_op_vec)(unsigned, TCGv_vec, TCGv_vec, TCGv_vec))
{
    uint64_t msb = (vece == MO_32) ? SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_op_vec(vece, t, b, tcg_constant_vec_matching(t, vece, msb));
}

static void do_xvabs_vec(unsigned vece, TCGv_vec t, TCGv_vec b)
{
    xv_msb_op1(vece, t, b, tcg_gen_andc_vec);
}

static void do_xvnabs_vec(unsigned vece, TCGv_vec t, TCGv_vec b)
{
    xv_msb_op1(vece, t, b, tcg_gen_or_vec);
}

static void do_xvneg_vec(unsigned vece, TCGv_vec t, TCGv_vec b)
{
    xv_msb_op1(vece, t, b, tcg_gen_xor_vec);
}

static bool do_vsx_msb_op(DisasContext *ctx, arg_XX2 *a, unsigned vece,
                          void (*vec)(unsigned, TCGv_vec, TCGv_vec),
                          void (*i64)(TCGv_i64, TCGv_i64))
{
    static const TCGOpcode vecop_list[] = {
        0
    };

    const GVecGen2 op = {
       .fni8 = i64,
       .fniv = vec,
       .opt_opc = vecop_list,
       .vece = vece
    };

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_2(vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                   16, 16, &op);

    return true;
}

TRANS(XVABSDP, do_vsx_msb_op, MO_64, do_xvabs_vec, do_xvabsdp_i64)
TRANS(XVNABSDP, do_vsx_msb_op, MO_64, do_xvnabs_vec, do_xvnabsdp_i64)
TRANS(XVNEGDP, do_vsx_msb_op, MO_64, do_xvneg_vec, do_xvnegdp_i64)
TRANS(XVABSSP, do_vsx_msb_op, MO_32, do_xvabs_vec, do_xvabssp_i64)
TRANS(XVNABSSP, do_vsx_msb_op, MO_32, do_xvnabs_vec, do_xvnabssp_i64)
TRANS(XVNEGSP, do_vsx_msb_op, MO_32, do_xvneg_vec, do_xvnegsp_i64)

static void do_xvcpsgndp_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_andi_i64(a, a, SGN_MASK_DP);
    tcg_gen_andi_i64(b, b, ~SGN_MASK_DP);
    tcg_gen_or_i64(t, a, b);
}

static void do_xvcpsgnsp_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_andi_i64(a, a, SGN_MASK_SP);
    tcg_gen_andi_i64(b, b, ~SGN_MASK_SP);
    tcg_gen_or_i64(t, a, b);
}

static void do_xvcpsgn_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b)
{
    uint64_t msb = (vece == MO_32) ? SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_bitsel_vec(vece, t, tcg_constant_vec_matching(t, vece, msb), a, b);
}

static bool do_xvcpsgn(DisasContext *ctx, arg_XX3 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        0
    };

    static const GVecGen3 op[] = {
        {
            .fni8 = do_xvcpsgnsp_i64,
            .fniv = do_xvcpsgn_vec,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fni8 = do_xvcpsgndp_i64,
            .fniv = do_xvcpsgn_vec,
            .opt_opc = vecop_list,
            .vece = MO_64
        },
    };

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_3(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), 16, 16, &op[vece - MO_32]);

    return true;
}

TRANS(XVCPSGNSP, do_xvcpsgn, MO_32)
TRANS(XVCPSGNDP, do_xvcpsgn, MO_64)

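/*
 * Vector compares. The helper always computes the CR6-style summary
 * result, but it is only committed to cpu_crf[6] when the opcode's
 * Rc bit (bit 21) is set.
 */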
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool do_helper_env_X_tb(DisasContext *ctx, arg_X_tb *a,
                               void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper(cpu_env, xt, xb);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCVUQQP, do_helper_env_X_tb, gen_helper_XSCVUQQP)
TRANS(XSCVSQQP, do_helper_env_X_tb, gen_helper_XSCVSQQP)
TRANS(XSCVQPUQZ, do_helper_env_X_tb, gen_helper_XSCVQPUQZ)
TRANS(XSCVQPSQZ, do_helper_env_X_tb, gen_helper_XSCVQPSQZ)

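/*
 * Boilerplate for helper-backed VSX operations. The X* variants take
 * VSR numbers from the xT/xA/xB opcode fields; the R* variants
 * address VSRs 32..63 via rD/rA/rB (used by the quad-precision
 * forms); variants that pass the raw opcode leave the remaining field
 * decoding to the helper.
 */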
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)

/* test if +Inf */
static void gen_is_pos_inf(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t exp_msk = (vece == MO_32) ? (uint32_t)EXP_MASK_SP : EXP_MASK_DP;
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, exp_msk));
}

/* test if -Inf */
static void gen_is_neg_inf(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t exp_msk = (vece == MO_32) ? (uint32_t)EXP_MASK_SP : EXP_MASK_DP;
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, sgn_msk | exp_msk));
}

/* test if +Inf or -Inf */
static void gen_is_any_inf(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t exp_msk = (vece == MO_32) ? (uint32_t)EXP_MASK_SP : EXP_MASK_DP;
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_andc_vec(vece, b, b, tcg_constant_vec_matching(t, vece, sgn_msk));
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, exp_msk));
}

/* test if +0 */
static void gen_is_pos_zero(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, 0));
}

/* test if -0 */
static void gen_is_neg_zero(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, sgn_msk));
}

/* test if +0 or -0 */
static void gen_is_any_zero(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_andc_vec(vece, b, b, tcg_constant_vec_matching(t, vece, sgn_msk));
    tcg_gen_cmp_vec(TCG_COND_EQ, vece, t, b,
                    tcg_constant_vec_matching(t, vece, 0));
}

/* test if +Denormal */
static void gen_is_pos_denormal(unsigned vece, TCGv_vec t,
                                TCGv_vec b, int64_t v)
{
    uint64_t frc_msk = (vece == MO_32) ? (uint32_t)FRC_MASK_SP : FRC_MASK_DP;
    tcg_gen_cmp_vec(TCG_COND_LEU, vece, t, b,
                    tcg_constant_vec_matching(t, vece, frc_msk));
    tcg_gen_cmp_vec(TCG_COND_NE, vece, b, b,
                    tcg_constant_vec_matching(t, vece, 0));
    tcg_gen_and_vec(vece, t, t, b);
}

/* test if -Denormal */
static void gen_is_neg_denormal(unsigned vece, TCGv_vec t,
                                TCGv_vec b, int64_t v)
{
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    uint64_t frc_msk = (vece == MO_32) ? (uint32_t)FRC_MASK_SP : FRC_MASK_DP;
    tcg_gen_cmp_vec(TCG_COND_LEU, vece, t, b,
                    tcg_constant_vec_matching(t, vece, sgn_msk | frc_msk));
    tcg_gen_cmp_vec(TCG_COND_GTU, vece, b, b,
                    tcg_constant_vec_matching(t, vece, sgn_msk));
    tcg_gen_and_vec(vece, t, t, b);
}

/* test if +Denormal or -Denormal */
static void gen_is_any_denormal(unsigned vece, TCGv_vec t,
                                TCGv_vec b, int64_t v)
{
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    uint64_t frc_msk = (vece == MO_32) ? (uint32_t)FRC_MASK_SP : FRC_MASK_DP;
    tcg_gen_andc_vec(vece, b, b, tcg_constant_vec_matching(t, vece, sgn_msk));
    tcg_gen_cmp_vec(TCG_COND_LE, vece, t, b,
                    tcg_constant_vec_matching(t, vece, frc_msk));
    tcg_gen_cmp_vec(TCG_COND_NE, vece, b, b,
                    tcg_constant_vec_matching(t, vece, 0));
    tcg_gen_and_vec(vece, t, t, b);
}

/* test if NaN */
static void gen_is_nan(unsigned vece, TCGv_vec t, TCGv_vec b, int64_t v)
{
    uint64_t exp_msk = (vece == MO_32) ? (uint32_t)EXP_MASK_SP : EXP_MASK_DP;
    uint64_t sgn_msk = (vece == MO_32) ? (uint32_t)SGN_MASK_SP : SGN_MASK_DP;
    tcg_gen_and_vec(vece, b, b, tcg_constant_vec_matching(t, vece, ~sgn_msk));
    tcg_gen_cmp_vec(TCG_COND_GT, vece, t, b,
                    tcg_constant_vec_matching(t, vece, exp_msk));
}

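/*
 * XVTSTDC[SD]P: uim is the DCMX mask of data classes to test for.
 * The single-bit and paired-bit masks handled in the switch expand
 * inline using the gen_is_* vector tests above; any other combination
 * falls back to the out-of-line helper through .fnoi.
 */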
static bool do_xvtstdc(DisasContext *ctx, arg_XX2_uim *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_cmp_vec, 0
    };

    GVecGen2i op = {
        .fnoi = (vece == MO_32) ? gen_helper_XVTSTDCSP : gen_helper_XVTSTDCDP,
        .vece = vece,
        .opt_opc = vecop_list
    };

    REQUIRE_VSX(ctx);

    switch (a->uim) {
    case 0:
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        return true;
    case ((1 << 0) | (1 << 1)):
        /* test if +Denormal or -Denormal */
        op.fniv = gen_is_any_denormal;
        break;
    case (1 << 0):
        /* test if -Denormal */
        op.fniv = gen_is_neg_denormal;
        break;
    case (1 << 1):
        /* test if +Denormal */
        op.fniv = gen_is_pos_denormal;
        break;
    case ((1 << 2) | (1 << 3)):
        /* test if +0 or -0 */
        op.fniv = gen_is_any_zero;
        break;
    case (1 << 2):
        /* test if -0 */
        op.fniv = gen_is_neg_zero;
        break;
    case (1 << 3):
        /* test if +0 */
        op.fniv = gen_is_pos_zero;
        break;
    case ((1 << 4) | (1 << 5)):
        /* test if +Inf or -Inf */
        op.fniv = gen_is_any_inf;
        break;
    case (1 << 4):
        /* test if -Inf */
        op.fniv = gen_is_neg_inf;
        break;
    case (1 << 5):
        /* test if +Inf */
        op.fniv = gen_is_pos_inf;
        break;
    case (1 << 6):
        /* test if NaN */
        op.fniv = gen_is_nan;
        break;
    }
    tcg_gen_gvec_2i(vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                    16, 16, a->uim, &op);

    return true;
}

TRANS_FLAGS2(VSX, XVTSTDCSP, do_xvtstdc, MO_32)
TRANS_FLAGS2(VSX, XVTSTDCDP, do_xvtstdc, MO_64)

static bool do_XX2_bf_uim(DisasContext *ctx, arg_XX2_bf_uim *a, bool vsr,
                     void (*gen_helper)(TCGv_env, TCGv_i32, TCGv_i32, TCGv_ptr))
{
    TCGv_ptr xb;

    REQUIRE_VSX(ctx);
    xb = vsr ? gen_vsr_ptr(a->xb) : gen_avr_ptr(a->xb);
    gen_helper(cpu_env, tcg_constant_i32(a->bf), tcg_constant_i32(a->uim), xb);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS_FLAGS2(ISA300, XSTSTDCSP, do_XX2_bf_uim, true, gen_helper_XSTSTDCSP)
TRANS_FLAGS2(ISA300, XSTSTDCDP, do_XX2_bf_uim, true, gen_helper_XSTSTDCDP)
TRANS_FLAGS2(ISA300, XSTSTDCQP, do_XX2_bf_uim, false, gen_helper_XSTSTDCQP)

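/*
 * Non-signalling conversion: the helper takes no cpu_env, so it
 * cannot touch FPSCR state, and it operates directly on the 64-bit
 * register halves.
 */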
bool trans_XSCVSPDPN(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_i64 tmp;

    REQUIRE_INSNS_FLAGS2(ctx, VSX207);
    REQUIRE_VSX(ctx);

    tmp = tcg_temp_new_i64();
    get_cpu_vsr(tmp, a->xb, true);

    gen_helper_XSCVSPDPN(tmp, tmp);

    set_cpu_vsr(a->xt, tmp, true);
    set_cpu_vsr(a->xt, tcg_constant_i64(0), false);

    tcg_temp_free_i64(tmp);

    return true;
}

GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
1405{
1406    TCGv_ptr xt, xa, xb;
1407
1408    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1409    REQUIRE_VSX(ctx);
1410
1411    xt = gen_vsr_ptr(a->xt);
1412    xa = gen_vsr_ptr(a->xa);
1413    xb = gen_vsr_ptr(a->xb);
1414
1415    gen_helper_VPERM(xt, xa, xt, xb);
1416
1417    tcg_temp_free_ptr(xt);
1418    tcg_temp_free_ptr(xa);
1419    tcg_temp_free_ptr(xb);
1420
1421    return true;
1422}
1423
1424static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
1425{
1426    TCGv_ptr xt, xa, xb;
1427
1428    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1429    REQUIRE_VSX(ctx);
1430
1431    xt = gen_vsr_ptr(a->xt);
1432    xa = gen_vsr_ptr(a->xa);
1433    xb = gen_vsr_ptr(a->xb);
1434
1435    gen_helper_VPERMR(xt, xa, xt, xb);
1436
1437    tcg_temp_free_ptr(xt);
1438    tcg_temp_free_ptr(xa);
1439    tcg_temp_free_ptr(xb);
1440
1441    return true;
1442}
1443
1444static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
1445{
1446    TCGv_i64 t0, t1;
1447
1448    REQUIRE_INSNS_FLAGS2(ctx, VSX);
1449    REQUIRE_VSX(ctx);
1450
1451    t0 = tcg_temp_new_i64();
1452
1453    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
1454        t1 = tcg_temp_new_i64();
1455
1456        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
1457        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);
1458
1459        set_cpu_vsr(a->xt, t0, true);
1460        set_cpu_vsr(a->xt, t1, false);
1461
1462        tcg_temp_free_i64(t1);
1463    } else {
1464        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
1465        set_cpu_vsr(a->xt, t0, true);
1466
1467        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
1468        set_cpu_vsr(a->xt, t0, false);
1469    }
1470
1471    tcg_temp_free_i64(t0);
1472
1473    return true;
1474}
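/*
 * Selection sketch for XXPERMDI above: dm bit 1 selects the doubleword
 * taken from xA and dm bit 0 the one taken from xB, a clear bit meaning
 * the high half. For example, dm = 0b01 gives xT = { xA[high], xB[low] },
 * and dm = 0b10 with xA == xB == xT swaps the two halves, which is why
 * the aliased case buffers both reads before writing.
 */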
1475
1476static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
1477{
1478    TCGv_ptr xt, xa, xb, xc;
1479
1480    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1481    REQUIRE_VSX(ctx);
1482
1483    xt = gen_vsr_ptr(a->xt);
1484    xa = gen_vsr_ptr(a->xa);
1485    xb = gen_vsr_ptr(a->xb);
1486    xc = gen_vsr_ptr(a->xc);
1487
1488    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));
1489
1490    tcg_temp_free_ptr(xt);
1491    tcg_temp_free_ptr(xa);
1492    tcg_temp_free_ptr(xb);
1493    tcg_temp_free_ptr(xc);
1494
1495    return true;
1496}
1497
1498typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);
1499
1500static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
1501                        const xxgenpcv_genfn fn[4])
1502{
1503    TCGv_ptr xt, vrb;
1504
1505    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1506    REQUIRE_VSX(ctx);
1507
1508    if (a->imm & ~0x3) {
1509        gen_invalid(ctx);
1510        return true;
1511    }
1512
1513    xt = gen_vsr_ptr(a->xt);
1514    vrb = gen_avr_ptr(a->vrb);
1515
1516    fn[a->imm](xt, vrb);
1517
1518    tcg_temp_free_ptr(xt);
1519    tcg_temp_free_ptr(vrb);
1520
1521    return true;
1522}
1523
1524#define XXGENPCV(NAME) \
1525    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)  \
1526    {                                                           \
1527        static const xxgenpcv_genfn fn[4] = {                   \
1528            gen_helper_##NAME##_be_exp,                         \
1529            gen_helper_##NAME##_be_comp,                        \
1530            gen_helper_##NAME##_le_exp,                         \
1531            gen_helper_##NAME##_le_comp,                        \
1532        };                                                      \
1533        return do_xxgenpcv(ctx, a, fn);                         \
1534    }
1535
1536XXGENPCV(XXGENPCVBM)
1537XXGENPCV(XXGENPCVHM)
1538XXGENPCV(XXGENPCVWM)
1539XXGENPCV(XXGENPCVDM)
1540#undef XXGENPCV
1541
1542static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
1543        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1544{
1545    TCGv_ptr t, s1, s2, s3;
1546
1547    t = gen_vsr_ptr(tgt);
1548    s1 = gen_vsr_ptr(src1);
1549    s2 = gen_vsr_ptr(src2);
1550    s3 = gen_vsr_ptr(src3);
1551
1552    gen_helper(cpu_env, t, s1, s2, s3);
1553
1554    tcg_temp_free_ptr(t);
1555    tcg_temp_free_ptr(s1);
1556    tcg_temp_free_ptr(s2);
1557    tcg_temp_free_ptr(s3);
1558
1559    return true;
1560}
1561
1562static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
1563        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1564{
1565    REQUIRE_VSX(ctx);
1566
1567    if (type_a) {
1568        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
1569    }
1570    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
1571}
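/*
 * Operand routing for the scalar fused multiply-adds below, assuming the
 * helpers compute t = s1 * s2 + s3: the A-form works out to
 * xT = xA * xT + xB and the M-form to xT = xA * xB + xT, matching the
 * two ISA encodings.
 */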
1572
1573TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
1574TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
1575TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
1576TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
1577TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
1578TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
1579TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
1580TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
1581TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
1582TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
1583TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
1584TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
1585TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
1586TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
1587TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
1588TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)
1589
1590static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
1591        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
1592        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1593{
1594    int vrt, vra, vrb;
1595
1596    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1597    REQUIRE_VSX(ctx);
1598
1599    vrt = a->rt + 32;
1600    vra = a->ra + 32;
1601    vrb = a->rb + 32;
1602
1603    if (a->rc) {
1604        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
1605    }
1606
1607    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
1608}
1609
1610TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
1611TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
1612TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
1613TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)
1614
1615#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
1616static void gen_##name(DisasContext *ctx)                                     \
1617{                                                                             \
1618    TCGv_ptr xt, s1, s2, s3;                                                  \
1619    if (unlikely(!ctx->vsx_enabled)) {                                        \
1620        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
1621        return;                                                               \
1622    }                                                                         \
1623    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
1624    s1 = gen_vsr_ptr(xA(ctx->opcode));                                        \
1625    if (ctx->opcode & PPC_BIT32(25)) {                                        \
1626        /*                                                                    \
1627         * AxT + B                                                            \
1628         */                                                                   \
1629        s2 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1630        s3 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1631    } else {                                                                  \
1632        /*                                                                    \
1633         * AxB + T                                                            \
1634         */                                                                   \
1635        s2 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1636        s3 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1637    }                                                                         \
1638    gen_helper_##name(cpu_env, xt, s1, s2, s3);                               \
1639    tcg_temp_free_ptr(xt);                                                    \
1640    tcg_temp_free_ptr(s1);                                                    \
1641    tcg_temp_free_ptr(s2);                                                    \
1642    tcg_temp_free_ptr(s3);                                                    \
1643}
1644
1645GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1646GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1647GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1648GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1649GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1650GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1651GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1652GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1653
1654static void gen_xxbrd(DisasContext *ctx)
1655{
1656    TCGv_i64 xth;
1657    TCGv_i64 xtl;
1658    TCGv_i64 xbh;
1659    TCGv_i64 xbl;
1660
1661    if (unlikely(!ctx->vsx_enabled)) {
1662        gen_exception(ctx, POWERPC_EXCP_VSXU);
1663        return;
1664    }
1665    xth = tcg_temp_new_i64();
1666    xtl = tcg_temp_new_i64();
1667    xbh = tcg_temp_new_i64();
1668    xbl = tcg_temp_new_i64();
1669    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1670    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1671
1672    tcg_gen_bswap64_i64(xth, xbh);
1673    tcg_gen_bswap64_i64(xtl, xbl);
1674    set_cpu_vsr(xT(ctx->opcode), xth, true);
1675    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1676
1677    tcg_temp_free_i64(xth);
1678    tcg_temp_free_i64(xtl);
1679    tcg_temp_free_i64(xbh);
1680    tcg_temp_free_i64(xbl);
1681}
1682
1683static void gen_xxbrh(DisasContext *ctx)
1684{
1685    TCGv_i64 xth;
1686    TCGv_i64 xtl;
1687    TCGv_i64 xbh;
1688    TCGv_i64 xbl;
1689
1690    if (unlikely(!ctx->vsx_enabled)) {
1691        gen_exception(ctx, POWERPC_EXCP_VSXU);
1692        return;
1693    }
1694    xth = tcg_temp_new_i64();
1695    xtl = tcg_temp_new_i64();
1696    xbh = tcg_temp_new_i64();
1697    xbl = tcg_temp_new_i64();
1698    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1699    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1700
1701    gen_bswap16x8(xth, xtl, xbh, xbl);
1702    set_cpu_vsr(xT(ctx->opcode), xth, true);
1703    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1704
1705    tcg_temp_free_i64(xth);
1706    tcg_temp_free_i64(xtl);
1707    tcg_temp_free_i64(xbh);
1708    tcg_temp_free_i64(xbl);
1709}
1710
1711static void gen_xxbrq(DisasContext *ctx)
1712{
1713    TCGv_i64 xth;
1714    TCGv_i64 xtl;
1715    TCGv_i64 xbh;
1716    TCGv_i64 xbl;
1717    TCGv_i64 t0;
1718
1719    if (unlikely(!ctx->vsx_enabled)) {
1720        gen_exception(ctx, POWERPC_EXCP_VSXU);
1721        return;
1722    }
1723    xth = tcg_temp_new_i64();
1724    xtl = tcg_temp_new_i64();
1725    xbh = tcg_temp_new_i64();
1726    xbl = tcg_temp_new_i64();
1727    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1728    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1729    t0 = tcg_temp_new_i64();
1730
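    /*
     * Byte-reverse the full quadword: the swapped low doubleword of xB
     * becomes the high doubleword of xT and vice versa, hence the extra
     * temporary to carry one half across the crossover.
     */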
1731    tcg_gen_bswap64_i64(t0, xbl);
1732    tcg_gen_bswap64_i64(xtl, xbh);
1733    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1734    tcg_gen_mov_i64(xth, t0);
1735    set_cpu_vsr(xT(ctx->opcode), xth, true);
1736
1737    tcg_temp_free_i64(t0);
1738    tcg_temp_free_i64(xth);
1739    tcg_temp_free_i64(xtl);
1740    tcg_temp_free_i64(xbh);
1741    tcg_temp_free_i64(xbl);
1742}
1743
1744static void gen_xxbrw(DisasContext *ctx)
1745{
1746    TCGv_i64 xth;
1747    TCGv_i64 xtl;
1748    TCGv_i64 xbh;
1749    TCGv_i64 xbl;
1750
1751    if (unlikely(!ctx->vsx_enabled)) {
1752        gen_exception(ctx, POWERPC_EXCP_VSXU);
1753        return;
1754    }
1755    xth = tcg_temp_new_i64();
1756    xtl = tcg_temp_new_i64();
1757    xbh = tcg_temp_new_i64();
1758    xbl = tcg_temp_new_i64();
1759    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1760    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1761
1762    gen_bswap32x4(xth, xtl, xbh, xbl);
1763    set_cpu_vsr(xT(ctx->opcode), xth, true);
1764    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1765
1766    tcg_temp_free_i64(xth);
1767    tcg_temp_free_i64(xtl);
1768    tcg_temp_free_i64(xbh);
1769    tcg_temp_free_i64(xbl);
1770}
1771
1772#define VSX_LOGICAL(name, vece, tcg_op)                              \
1773static void glue(gen_, name)(DisasContext *ctx)                      \
1774    {                                                                \
1775        if (unlikely(!ctx->vsx_enabled)) {                           \
1776            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
1777            return;                                                  \
1778        }                                                            \
1779        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
1780               vsr_full_offset(xA(ctx->opcode)),                     \
1781               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
1782    }
1783
1784VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
1785VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
1786VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
1787VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
1788VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
1789VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
1790VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
1791VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
1792
1793#define VSX_XXMRG(name, high)                               \
1794static void glue(gen_, name)(DisasContext *ctx)             \
1795    {                                                       \
1796        TCGv_i64 a0, a1, b0, b1, tmp;                       \
1797        if (unlikely(!ctx->vsx_enabled)) {                  \
1798            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
1799            return;                                         \
1800        }                                                   \
1801        a0 = tcg_temp_new_i64();                            \
1802        a1 = tcg_temp_new_i64();                            \
1803        b0 = tcg_temp_new_i64();                            \
1804        b1 = tcg_temp_new_i64();                            \
1805        tmp = tcg_temp_new_i64();                           \
1806        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
1807        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
1808        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
1809        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
1810        tcg_gen_shri_i64(a0, a0, 32);                       \
1811        tcg_gen_shri_i64(b0, b0, 32);                       \
1812        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
1813        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
1814        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
1815        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
1816        tcg_temp_free_i64(a0);                              \
1817        tcg_temp_free_i64(a1);                              \
1818        tcg_temp_free_i64(b0);                              \
1819        tcg_temp_free_i64(b1);                              \
1820        tcg_temp_free_i64(tmp);                             \
1821    }
1822
1823VSX_XXMRG(xxmrghw, 1)
1824VSX_XXMRG(xxmrglw, 0)
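/*
 * Merge layout for illustration, using big-endian word numbering:
 * xxmrghw interleaves the high doublewords,
 * xT = { A[0], B[0], A[1], B[1] }, and xxmrglw interleaves the low ones,
 * xT = { A[2], B[2], A[3], B[3] }.
 */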
1825
1826static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
1827{
1828    REQUIRE_INSNS_FLAGS2(ctx, VSX);
1829    REQUIRE_VSX(ctx);
1830
1831    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
1832                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);
1833
1834    return true;
1835}
1836
1837static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim *a)
1838{
1839    int tofs, bofs;
1840
1841    REQUIRE_VSX(ctx);
1842
1843    tofs = vsr_full_offset(a->xt);
1844    bofs = vsr_full_offset(a->xb);
1845    bofs += a->uim << MO_32;
1846#if !HOST_BIG_ENDIAN
1847    bofs ^= 8 | 4;
1848#endif
1849
1850    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
1851    return true;
1852}
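/*
 * A note on the bofs adjustment above: gvec offsets address the 16-byte
 * register in host memory, so on little-endian hosts the byte offset of
 * a 4-byte element is mirrored within the vector; XORing with 8 | 4
 * converts the big-endian element offset accordingly.
 */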
1853
1854#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
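/*
 * pattern() replicates a byte into all eight lanes of a 64-bit value,
 * e.g. pattern(0x2a) == 0x2a2a2a2a2a2a2a2aull.
 */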
1855
1856static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
1857{
1858    if (a->xt < 32) {
1859        REQUIRE_VSX(ctx);
1860    } else {
1861        REQUIRE_VECTOR(ctx);
1862    }
1863    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
1864    return true;
1865}
1866
1867static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
1868{
1869    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1870    REQUIRE_VSX(ctx);
1871
1872    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);
1873
1874    return true;
1875}
1876
1877static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
1878{
1879    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1880    REQUIRE_VSX(ctx);
1881
1882    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
1883                         helper_todouble(a->si));
1884    return true;
1885}
1886
1887static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
1888{
1889    TCGv_i32 imm;
1890
1891    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1892    REQUIRE_VSX(ctx);
1893
1894    imm = tcg_constant_i32(a->si);
1895
1896    tcg_gen_st_i32(imm, cpu_env,
1897        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
1898    tcg_gen_st_i32(imm, cpu_env,
1899        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));
1900
1901    return true;
1902}
1903
1904static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
1905{
1906    static const uint64_t values[32] = {
1907        0, /* Unspecified */
1908        0x3FFF000000000000llu, /* QP +1.0 */
1909        0x4000000000000000llu, /* QP +2.0 */
1910        0x4000800000000000llu, /* QP +3.0 */
1911        0x4001000000000000llu, /* QP +4.0 */
1912        0x4001400000000000llu, /* QP +5.0 */
1913        0x4001800000000000llu, /* QP +6.0 */
1914        0x4001C00000000000llu, /* QP +7.0 */
1915        0x7FFF000000000000llu, /* QP +Inf */
1916        0x7FFF800000000000llu, /* QP dQNaN */
1917        0, /* Unspecified */
1918        0, /* Unspecified */
1919        0, /* Unspecified */
1920        0, /* Unspecified */
1921        0, /* Unspecified */
1922        0, /* Unspecified */
1923        0x8000000000000000llu, /* QP -0.0 */
1924        0xBFFF000000000000llu, /* QP -1.0 */
1925        0xC000000000000000llu, /* QP -2.0 */
1926        0xC000800000000000llu, /* QP -3.0 */
1927        0xC001000000000000llu, /* QP -4.0 */
1928        0xC001400000000000llu, /* QP -5.0 */
1929        0xC001800000000000llu, /* QP -6.0 */
1930        0xC001C00000000000llu, /* QP -7.0 */
1931        0xFFFF000000000000llu, /* QP -Inf */
1932    };
1933
1934    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1935    REQUIRE_VSX(ctx);
1936
1937    if (values[a->uim]) {
1938        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
1939        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
1940    } else {
1941        gen_invalid(ctx);
1942    }
1943
1944    return true;
1945}
1946
1947static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
1948{
1949    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;
1950
1951    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1952    REQUIRE_VSX(ctx);
1953
1954    xb = tcg_temp_new_i64();
1955    t0 = tcg_temp_new_i64();
1956    t1 = tcg_temp_new_i64();
1957    all_true = tcg_temp_new_i64();
1958    all_false = tcg_temp_new_i64();
1959    mask = tcg_constant_i64(dup_const(MO_8, 1));
1960    zero = tcg_constant_i64(0);
1961
1962    get_cpu_vsr(xb, a->xb, true);
1963    tcg_gen_and_i64(t0, mask, xb);
1964    get_cpu_vsr(xb, a->xb, false);
1965    tcg_gen_and_i64(t1, mask, xb);
1966
1967    tcg_gen_or_i64(all_false, t0, t1);
1968    tcg_gen_and_i64(all_true, t0, t1);
1969
1970    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
1971    tcg_gen_shli_i64(all_false, all_false, 1);
1972    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
1973    tcg_gen_shli_i64(all_true, all_true, 3);
1974
1975    tcg_gen_or_i64(t0, all_false, all_true);
1976    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);
1977
1978    tcg_temp_free_i64(xb);
1979    tcg_temp_free_i64(t0);
1980    tcg_temp_free_i64(t1);
1981    tcg_temp_free_i64(all_true);
1982    tcg_temp_free_i64(all_false);
1983
1984    return true;
1985}
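/*
 * CR encoding produced above: "every byte has its LSB set" lands in CR
 * bit 3 (value 8, the LT position) and "no byte has its LSB set" in CR
 * bit 1 (value 2, the EQ position), hence the shifts by 3 and 1.
 */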
1986
1987static void gen_xxsldwi(DisasContext *ctx)
1988{
1989    TCGv_i64 xth, xtl;
1990    if (unlikely(!ctx->vsx_enabled)) {
1991        gen_exception(ctx, POWERPC_EXCP_VSXU);
1992        return;
1993    }
1994    xth = tcg_temp_new_i64();
1995    xtl = tcg_temp_new_i64();
1996
1997    switch (SHW(ctx->opcode)) {
1998        case 0: {
1999            get_cpu_vsr(xth, xA(ctx->opcode), true);
2000            get_cpu_vsr(xtl, xA(ctx->opcode), false);
2001            break;
2002        }
2003        case 1: {
2004            TCGv_i64 t0 = tcg_temp_new_i64();
2005            get_cpu_vsr(xth, xA(ctx->opcode), true);
2006            tcg_gen_shli_i64(xth, xth, 32);
2007            get_cpu_vsr(t0, xA(ctx->opcode), false);
2008            tcg_gen_shri_i64(t0, t0, 32);
2009            tcg_gen_or_i64(xth, xth, t0);
2010            get_cpu_vsr(xtl, xA(ctx->opcode), false);
2011            tcg_gen_shli_i64(xtl, xtl, 32);
2012            get_cpu_vsr(t0, xB(ctx->opcode), true);
2013            tcg_gen_shri_i64(t0, t0, 32);
2014            tcg_gen_or_i64(xtl, xtl, t0);
2015            tcg_temp_free_i64(t0);
2016            break;
2017        }
2018        case 2: {
2019            get_cpu_vsr(xth, xA(ctx->opcode), false);
2020            get_cpu_vsr(xtl, xB(ctx->opcode), true);
2021            break;
2022        }
2023        case 3: {
2024            TCGv_i64 t0 = tcg_temp_new_i64();
2025            get_cpu_vsr(xth, xA(ctx->opcode), false);
2026            tcg_gen_shli_i64(xth, xth, 32);
2027            get_cpu_vsr(t0, xB(ctx->opcode), true);
2028            tcg_gen_shri_i64(t0, t0, 32);
2029            tcg_gen_or_i64(xth, xth, t0);
2030            get_cpu_vsr(xtl, xB(ctx->opcode), true);
2031            tcg_gen_shli_i64(xtl, xtl, 32);
2032            get_cpu_vsr(t0, xB(ctx->opcode), false);
2033            tcg_gen_shri_i64(t0, t0, 32);
2034            tcg_gen_or_i64(xtl, xtl, t0);
2035            tcg_temp_free_i64(t0);
2036            break;
2037        }
2038    }
2039
2040    set_cpu_vsr(xT(ctx->opcode), xth, true);
2041    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2042
2043    tcg_temp_free_i64(xth);
2044    tcg_temp_free_i64(xtl);
2045}
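/*
 * xxsldwi treats xA:xB as eight concatenated words and selects four
 * starting at word SHW. For example, SHW == 2 (the shift-free case
 * above) yields xT = { A[2], A[3], B[0], B[1] }.
 */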
2046
2047static bool do_vsx_extract_insert(DisasContext *ctx, arg_XX2_uim *a,
2048    void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_i32))
2049{
2050    TCGv_i64 zero = tcg_constant_i64(0);
2051    TCGv_ptr xt, xb;
2052
2053    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
2054    REQUIRE_VSX(ctx);
2055
2056    /*
2057     * uim > 15 is out of bounds, so the result is simply zeroed here;
2058     * uim values 13 to 15 are handled in the helper, as hardware does.
2059     */
2060    if (a->uim > 15) {
2061        set_cpu_vsr(a->xt, zero, true);
2062        set_cpu_vsr(a->xt, zero, false);
2063    } else {
2064        xt = gen_vsr_ptr(a->xt);
2065        xb = gen_vsr_ptr(a->xb);
2066        gen_helper(xt, xb, tcg_constant_i32(a->uim));
2067        tcg_temp_free_ptr(xb);
2068        tcg_temp_free_ptr(xt);
2069    }
2070
2071    return true;
2072}
2073
2074TRANS(XXEXTRACTUW, do_vsx_extract_insert, gen_helper_XXEXTRACTUW)
2075TRANS(XXINSERTW, do_vsx_extract_insert, gen_helper_XXINSERTW)
2076
2077#ifdef TARGET_PPC64
2078static void gen_xsxexpdp(DisasContext *ctx)
2079{
2080    TCGv rt = cpu_gpr[rD(ctx->opcode)];
2081    TCGv_i64 t0;
2082    if (unlikely(!ctx->vsx_enabled)) {
2083        gen_exception(ctx, POWERPC_EXCP_VSXU);
2084        return;
2085    }
2086    t0 = tcg_temp_new_i64();
2087    get_cpu_vsr(t0, xB(ctx->opcode), true);
2088    tcg_gen_extract_i64(rt, t0, 52, 11);
2089    tcg_temp_free_i64(t0);
2090}
2091
2092static void gen_xsxexpqp(DisasContext *ctx)
2093{
2094    TCGv_i64 xth;
2095    TCGv_i64 xtl;
2096    TCGv_i64 xbh;
2097
2098    if (unlikely(!ctx->vsx_enabled)) {
2099        gen_exception(ctx, POWERPC_EXCP_VSXU);
2100        return;
2101    }
2102    xth = tcg_temp_new_i64();
2103    xtl = tcg_temp_new_i64();
2104    xbh = tcg_temp_new_i64();
2105    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
2106
2107    tcg_gen_extract_i64(xth, xbh, 48, 15);
2108    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
2109    tcg_gen_movi_i64(xtl, 0);
2110    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
2111
2112    tcg_temp_free_i64(xbh);
2113    tcg_temp_free_i64(xth);
2114    tcg_temp_free_i64(xtl);
2115}
2116
2117static void gen_xsiexpdp(DisasContext *ctx)
2118{
2119    TCGv_i64 xth;
2120    TCGv ra = cpu_gpr[rA(ctx->opcode)];
2121    TCGv rb = cpu_gpr[rB(ctx->opcode)];
2122    TCGv_i64 t0;
2123
2124    if (unlikely(!ctx->vsx_enabled)) {
2125        gen_exception(ctx, POWERPC_EXCP_VSXU);
2126        return;
2127    }
2128    t0 = tcg_temp_new_i64();
2129    xth = tcg_temp_new_i64();
2130    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
2131    tcg_gen_andi_i64(t0, rb, 0x7FF);
2132    tcg_gen_shli_i64(t0, t0, 52);
2133    tcg_gen_or_i64(xth, xth, t0);
2134    set_cpu_vsr(xT(ctx->opcode), xth, true);
2135    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
2136    tcg_temp_free_i64(t0);
2137    tcg_temp_free_i64(xth);
2138}
2139
2140static void gen_xsiexpqp(DisasContext *ctx)
2141{
2142    TCGv_i64 xth;
2143    TCGv_i64 xtl;
2144    TCGv_i64 xah;
2145    TCGv_i64 xal;
2146    TCGv_i64 xbh;
2147    TCGv_i64 t0;
2148
2149    if (unlikely(!ctx->vsx_enabled)) {
2150        gen_exception(ctx, POWERPC_EXCP_VSXU);
2151        return;
2152    }
2153    xth = tcg_temp_new_i64();
2154    xtl = tcg_temp_new_i64();
2155    xah = tcg_temp_new_i64();
2156    xal = tcg_temp_new_i64();
2157    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
2158    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
2159    xbh = tcg_temp_new_i64();
2160    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
2161    t0 = tcg_temp_new_i64();
2162
2163    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
2164    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
2165    tcg_gen_shli_i64(t0, t0, 48);
2166    tcg_gen_or_i64(xth, xth, t0);
2167    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
2168    tcg_gen_mov_i64(xtl, xal);
2169    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
2170
2171    tcg_temp_free_i64(t0);
2172    tcg_temp_free_i64(xth);
2173    tcg_temp_free_i64(xtl);
2174    tcg_temp_free_i64(xah);
2175    tcg_temp_free_i64(xal);
2176    tcg_temp_free_i64(xbh);
2177}
2178
2179static void gen_xsxsigdp(DisasContext *ctx)
2180{
2181    TCGv rt = cpu_gpr[rD(ctx->opcode)];
2182    TCGv_i64 t0, t1, zr, nan, exp;
2183
2184    if (unlikely(!ctx->vsx_enabled)) {
2185        gen_exception(ctx, POWERPC_EXCP_VSXU);
2186        return;
2187    }
2188    exp = tcg_temp_new_i64();
2189    t0 = tcg_temp_new_i64();
2190    t1 = tcg_temp_new_i64();
2191    zr = tcg_const_i64(0);
2192    nan = tcg_const_i64(2047);
2193
2194    get_cpu_vsr(t1, xB(ctx->opcode), true);
2195    tcg_gen_extract_i64(exp, t1, 52, 11);
2196    tcg_gen_movi_i64(t0, 0x0010000000000000);
2197    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2198    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2199    get_cpu_vsr(t1, xB(ctx->opcode), true);
2200    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
2201
2202    tcg_temp_free_i64(t0);
2203    tcg_temp_free_i64(t1);
2204    tcg_temp_free_i64(exp);
2205    tcg_temp_free_i64(zr);
2206    tcg_temp_free_i64(nan);
2207}
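/*
 * The movcond pair above supplies the implicit leading bit: t0 starts as
 * 2^52, and is cleared when the biased exponent is 0 (zero or denormal)
 * or 2047 (Inf or NaN), before being merged with the 52 explicit
 * fraction bits.
 */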
2208
2209static void gen_xsxsigqp(DisasContext *ctx)
2210{
2211    TCGv_i64 t0, zr, nan, exp;
2212    TCGv_i64 xth;
2213    TCGv_i64 xtl;
2214    TCGv_i64 xbh;
2215    TCGv_i64 xbl;
2216
2217    if (unlikely(!ctx->vsx_enabled)) {
2218        gen_exception(ctx, POWERPC_EXCP_VSXU);
2219        return;
2220    }
2221    xth = tcg_temp_new_i64();
2222    xtl = tcg_temp_new_i64();
2223    xbh = tcg_temp_new_i64();
2224    xbl = tcg_temp_new_i64();
2225    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
2226    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
2227    exp = tcg_temp_new_i64();
2228    t0 = tcg_temp_new_i64();
2229    zr = tcg_const_i64(0);
2230    nan = tcg_const_i64(32767);
2231
2232    tcg_gen_extract_i64(exp, xbh, 48, 15);
2233    tcg_gen_movi_i64(t0, 0x0001000000000000);
2234    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2235    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2236    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
2237    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
2238    tcg_gen_mov_i64(xtl, xbl);
2239    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
2240
2241    tcg_temp_free_i64(t0);
2242    tcg_temp_free_i64(exp);
2243    tcg_temp_free_i64(zr);
2244    tcg_temp_free_i64(nan);
2245    tcg_temp_free_i64(xth);
2246    tcg_temp_free_i64(xtl);
2247    tcg_temp_free_i64(xbh);
2248    tcg_temp_free_i64(xbl);
2249}
2250#endif
2251
2252static void gen_xviexpsp(DisasContext *ctx)
2253{
2254    TCGv_i64 xth;
2255    TCGv_i64 xtl;
2256    TCGv_i64 xah;
2257    TCGv_i64 xal;
2258    TCGv_i64 xbh;
2259    TCGv_i64 xbl;
2260    TCGv_i64 t0;
2261
2262    if (unlikely(!ctx->vsx_enabled)) {
2263        gen_exception(ctx, POWERPC_EXCP_VSXU);
2264        return;
2265    }
2266    xth = tcg_temp_new_i64();
2267    xtl = tcg_temp_new_i64();
2268    xah = tcg_temp_new_i64();
2269    xal = tcg_temp_new_i64();
2270    xbh = tcg_temp_new_i64();
2271    xbl = tcg_temp_new_i64();
2272    get_cpu_vsr(xah, xA(ctx->opcode), true);
2273    get_cpu_vsr(xal, xA(ctx->opcode), false);
2274    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2275    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2276    t0 = tcg_temp_new_i64();
2277
2278    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
2279    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
2280    tcg_gen_shli_i64(t0, t0, 23);
2281    tcg_gen_or_i64(xth, xth, t0);
2282    set_cpu_vsr(xT(ctx->opcode), xth, true);
2283    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
2284    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
2285    tcg_gen_shli_i64(t0, t0, 23);
2286    tcg_gen_or_i64(xtl, xtl, t0);
2287    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2288
2289    tcg_temp_free_i64(t0);
2290    tcg_temp_free_i64(xth);
2291    tcg_temp_free_i64(xtl);
2292    tcg_temp_free_i64(xah);
2293    tcg_temp_free_i64(xal);
2294    tcg_temp_free_i64(xbh);
2295    tcg_temp_free_i64(xbl);
2296}
2297
2298static void gen_xviexpdp(DisasContext *ctx)
2299{
2300    TCGv_i64 xth;
2301    TCGv_i64 xtl;
2302    TCGv_i64 xah;
2303    TCGv_i64 xal;
2304    TCGv_i64 xbh;
2305    TCGv_i64 xbl;
2306
2307    if (unlikely(!ctx->vsx_enabled)) {
2308        gen_exception(ctx, POWERPC_EXCP_VSXU);
2309        return;
2310    }
2311    xth = tcg_temp_new_i64();
2312    xtl = tcg_temp_new_i64();
2313    xah = tcg_temp_new_i64();
2314    xal = tcg_temp_new_i64();
2315    xbh = tcg_temp_new_i64();
2316    xbl = tcg_temp_new_i64();
2317    get_cpu_vsr(xah, xA(ctx->opcode), true);
2318    get_cpu_vsr(xal, xA(ctx->opcode), false);
2319    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2320    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2321
2322    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
2323    set_cpu_vsr(xT(ctx->opcode), xth, true);
2324
2325    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
2326    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2327
2328    tcg_temp_free_i64(xth);
2329    tcg_temp_free_i64(xtl);
2330    tcg_temp_free_i64(xah);
2331    tcg_temp_free_i64(xal);
2332    tcg_temp_free_i64(xbh);
2333    tcg_temp_free_i64(xbl);
2334}
2335
2336static void gen_xvxexpsp(DisasContext *ctx)
2337{
2338    TCGv_i64 xth;
2339    TCGv_i64 xtl;
2340    TCGv_i64 xbh;
2341    TCGv_i64 xbl;
2342
2343    if (unlikely(!ctx->vsx_enabled)) {
2344        gen_exception(ctx, POWERPC_EXCP_VSXU);
2345        return;
2346    }
2347    xth = tcg_temp_new_i64();
2348    xtl = tcg_temp_new_i64();
2349    xbh = tcg_temp_new_i64();
2350    xbl = tcg_temp_new_i64();
2351    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2352    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2353
2354    tcg_gen_shri_i64(xth, xbh, 23);
2355    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
2356    set_cpu_vsr(xT(ctx->opcode), xth, true);
2357    tcg_gen_shri_i64(xtl, xbl, 23);
2358    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
2359    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2360
2361    tcg_temp_free_i64(xth);
2362    tcg_temp_free_i64(xtl);
2363    tcg_temp_free_i64(xbh);
2364    tcg_temp_free_i64(xbl);
2365}
2366
2367static void gen_xvxexpdp(DisasContext *ctx)
2368{
2369    TCGv_i64 xth;
2370    TCGv_i64 xtl;
2371    TCGv_i64 xbh;
2372    TCGv_i64 xbl;
2373
2374    if (unlikely(!ctx->vsx_enabled)) {
2375        gen_exception(ctx, POWERPC_EXCP_VSXU);
2376        return;
2377    }
2378    xth = tcg_temp_new_i64();
2379    xtl = tcg_temp_new_i64();
2380    xbh = tcg_temp_new_i64();
2381    xbl = tcg_temp_new_i64();
2382    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2383    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2384
2385    tcg_gen_extract_i64(xth, xbh, 52, 11);
2386    set_cpu_vsr(xT(ctx->opcode), xth, true);
2387    tcg_gen_extract_i64(xtl, xbl, 52, 11);
2388    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2389
2390    tcg_temp_free_i64(xth);
2391    tcg_temp_free_i64(xtl);
2392    tcg_temp_free_i64(xbh);
2393    tcg_temp_free_i64(xbl);
2394}
2395
2396static bool trans_XVXSIGSP(DisasContext *ctx, arg_XX2 *a)
2397{
2398    TCGv_ptr t, b;
2399
2400    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
2401    REQUIRE_VSX(ctx);
2402
2403    t = gen_vsr_ptr(a->xt);
2404    b = gen_vsr_ptr(a->xb);
2405
2406    gen_helper_XVXSIGSP(t, b);
2407
2408    tcg_temp_free_ptr(t);
2409    tcg_temp_free_ptr(b);
2410
2411    return true;
2412}
2413
2414static void gen_xvxsigdp(DisasContext *ctx)
2415{
2416    TCGv_i64 xth;
2417    TCGv_i64 xtl;
2418    TCGv_i64 xbh;
2419    TCGv_i64 xbl;
2420    TCGv_i64 t0, zr, nan, exp;
2421
2422    if (unlikely(!ctx->vsx_enabled)) {
2423        gen_exception(ctx, POWERPC_EXCP_VSXU);
2424        return;
2425    }
2426    xth = tcg_temp_new_i64();
2427    xtl = tcg_temp_new_i64();
2428    xbh = tcg_temp_new_i64();
2429    xbl = tcg_temp_new_i64();
2430    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2431    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2432    exp = tcg_temp_new_i64();
2433    t0 = tcg_temp_new_i64();
2434    zr = tcg_const_i64(0);
2435    nan = tcg_const_i64(2047);
2436
2437    tcg_gen_extract_i64(exp, xbh, 52, 11);
2438    tcg_gen_movi_i64(t0, 0x0010000000000000);
2439    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2440    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2441    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
2442    set_cpu_vsr(xT(ctx->opcode), xth, true);
2443
2444    tcg_gen_extract_i64(exp, xbl, 52, 11);
2445    tcg_gen_movi_i64(t0, 0x0010000000000000);
2446    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2447    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2448    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2449    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2450
2451    tcg_temp_free_i64(t0);
2452    tcg_temp_free_i64(exp);
2453    tcg_temp_free_i64(zr);
2454    tcg_temp_free_i64(nan);
2455    tcg_temp_free_i64(xth);
2456    tcg_temp_free_i64(xtl);
2457    tcg_temp_free_i64(xbh);
2458    tcg_temp_free_i64(xbl);
2459}
2460
2461static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
2462                     int rt, bool store, bool paired)
2463{
2464    TCGv ea;
2465    TCGv_i64 xt;
2466    MemOp mop;
2467    int rt1, rt2;
2468
2469    xt = tcg_temp_new_i64();
2470
2471    mop = DEF_MEMOP(MO_UQ);
2472
2473    gen_set_access_type(ctx, ACCESS_INT);
2474    ea = do_ea_calc(ctx, ra, displ);
2475
2476    if (paired && ctx->le_mode) {
2477        rt1 = rt + 1;
2478        rt2 = rt;
2479    } else {
2480        rt1 = rt;
2481        rt2 = rt + 1;
2482    }
2483
2484    if (store) {
2485        get_cpu_vsr(xt, rt1, !ctx->le_mode);
2486        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2487        gen_addr_add(ctx, ea, ea, 8);
2488        get_cpu_vsr(xt, rt1, ctx->le_mode);
2489        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2490        if (paired) {
2491            gen_addr_add(ctx, ea, ea, 8);
2492            get_cpu_vsr(xt, rt2, !ctx->le_mode);
2493            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2494            gen_addr_add(ctx, ea, ea, 8);
2495            get_cpu_vsr(xt, rt2, ctx->le_mode);
2496            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2497        }
2498    } else {
2499        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2500        set_cpu_vsr(rt1, xt, !ctx->le_mode);
2501        gen_addr_add(ctx, ea, ea, 8);
2502        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2503        set_cpu_vsr(rt1, xt, ctx->le_mode);
2504        if (paired) {
2505            gen_addr_add(ctx, ea, ea, 8);
2506            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2507            set_cpu_vsr(rt2, xt, !ctx->le_mode);
2508            gen_addr_add(ctx, ea, ea, 8);
2509            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2510            set_cpu_vsr(rt2, xt, ctx->le_mode);
2511        }
2512    }
2513
2514    tcg_temp_free(ea);
2515    tcg_temp_free_i64(xt);
2516    return true;
2517}
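/*
 * A note on the rt1/rt2 swap above: the paired forms access registers
 * rt and rt + 1, and in little-endian mode the first 16 bytes at EA
 * belong to rt + 1, so the pair is walked in reverse.
 */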
2518
2519static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
2520{
2521    if (paired || a->rt >= 32) {
2522        REQUIRE_VSX(ctx);
2523    } else {
2524        REQUIRE_VECTOR(ctx);
2525    }
2526
2527    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
2528}
2529
2530static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
2531                           bool store, bool paired)
2532{
2533    arg_D d;
2534    REQUIRE_VSX(ctx);
2535
2536    if (!resolve_PLS_D(ctx, &d, a)) {
2537        return true;
2538    }
2539
2540    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
2541}
2542
2543static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
2544{
2545    if (paired || a->rt >= 32) {
2546        REQUIRE_VSX(ctx);
2547    } else {
2548        REQUIRE_VECTOR(ctx);
2549    }
2550
2551    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
2552}
2553
2554static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2555{
2556    TCGv ea;
2557    TCGv_i64 xt;
2558    MemOp mop;
2559
2560    if (store) {
2561        REQUIRE_VECTOR(ctx);
2562    } else {
2563        REQUIRE_VSX(ctx);
2564    }
2565
2566    xt = tcg_temp_new_i64();
2567    mop = DEF_MEMOP(MO_UQ);
2568
2569    gen_set_access_type(ctx, ACCESS_INT);
2570    ea = do_ea_calc(ctx, ra, displ);
2571
2572    if (store) {
2573        get_cpu_vsr(xt, rt + 32, true);
2574        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2575    } else {
2576        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2577        set_cpu_vsr(rt + 32, xt, true);
2578        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2579    }
2580
2581    tcg_temp_free(ea);
2582    tcg_temp_free_i64(xt);
2583
2584    return true;
2585}
2586
2587static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
2588{
2589    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2590}
2591
2592static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2593{
2594    arg_D d;
2595
2596    if (!resolve_PLS_D(ctx, &d, a)) {
2597        return true;
2598    }
2599
2600    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2601}
2602
2603static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2604{
2605    TCGv ea;
2606    TCGv_i64 xt;
2607
2608    REQUIRE_VECTOR(ctx);
2609
2610    xt = tcg_temp_new_i64();
2611
2612    gen_set_access_type(ctx, ACCESS_INT);
2613    ea = do_ea_calc(ctx, ra, displ);
2614
2615    if (store) {
2616        get_cpu_vsr(xt, rt + 32, true);
2617        gen_qemu_st32fs(ctx, xt, ea);
2618    } else {
2619        gen_qemu_ld32fs(ctx, xt, ea);
2620        set_cpu_vsr(rt + 32, xt, true);
2621        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2622    }
2623
2624    tcg_temp_free(ea);
2625    tcg_temp_free_i64(xt);
2626
2627    return true;
2628}
2629
2630static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
2631{
2632    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2633}
2634
2635static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2636{
2637    arg_D d;
2638
2639    if (!resolve_PLS_D(ctx, &d, a)) {
2640        return true;
2641    }
2642
2643    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2644}
2645
2646TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
2647TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
2648TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
2649TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
2650TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
2651TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
2652TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
2653TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
2654TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
2655TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
2656TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
2657TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
2658TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
2659TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
2660TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
2661TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
2662TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
2663TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
2664TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
2665TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
2666
2667static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
2668{
2669    TCGv ea;
2670    TCGv_i64 xt;
2671
2672    REQUIRE_VSX(ctx);
2673
2674    xt = tcg_temp_new_i64();
2675
2676    gen_set_access_type(ctx, ACCESS_INT);
2677    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);
2678
2679    if (store) {
2680        get_cpu_vsr(xt, a->rt, false);
2681        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2682    } else {
2683        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2684        set_cpu_vsr(a->rt, xt, false);
2685        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
2686    }
2687
2688    tcg_temp_free(ea);
2689    tcg_temp_free_i64(xt);
2690    return true;
2691}
2692
2693TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
2694TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
2695TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
2696TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
2697TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
2698TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
2699TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
2700TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)
2701
2702static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
2703                           int64_t imm)
2704{
2705    /*
2706     * Instead of processing imm bit-by-bit, we'll skip the computation of
2707     * conjunctions whose corresponding bit is unset.
2708     */
2709    int bit;
2710    TCGv_i64 conj, disj;
2711
2712    conj = tcg_temp_new_i64();
2713    disj = tcg_const_i64(0);
2714
2715    /* Iterate over set bits from the least to the most significant bit */
2716    while (imm) {
2717        /*
2718         * Get the next bit to be processed with ctz64, then mirror it
2719         * (bit = 7 - ctz) to match PowerISA's big-endian bit numbering.
2720         */
2721        bit = 7 - ctz64(imm);
2722        if (bit & 0x4) {
2723            tcg_gen_mov_i64(conj, a);
2724        } else {
2725            tcg_gen_not_i64(conj, a);
2726        }
2727        if (bit & 0x2) {
2728            tcg_gen_and_i64(conj, conj, b);
2729        } else {
2730            tcg_gen_andc_i64(conj, conj, b);
2731        }
2732        if (bit & 0x1) {
2733            tcg_gen_and_i64(conj, conj, c);
2734        } else {
2735            tcg_gen_andc_i64(conj, conj, c);
2736        }
2737        tcg_gen_or_i64(disj, disj, conj);
2738
2739        /* Unset the least significant bit that is set */
2740        imm &= imm - 1;
2741    }
2742
2743    tcg_gen_mov_i64(t, disj);
2744
2745    tcg_temp_free_i64(conj);
2746    tcg_temp_free_i64(disj);
2747}
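/*
 * Worked example of the indexing: for inputs (a, b, c) the result bit is
 * PowerISA bit (a << 2) | (b << 1) | c of imm, numbered from the most
 * significant end, hence 7 - ctz64(imm) above. imm == 0b00010111 would
 * thus compute the majority function of a, b and c.
 */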
2748
2749static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2750                           TCGv_vec c, int64_t imm)
2751{
2752    /*
2753     * Instead of processing imm bit-by-bit, we'll skip the computation of
2754     * conjunctions whose corresponding bit is unset.
2755     */
2756    int bit;
2757    TCGv_vec disj, conj;
2758
2759    disj = tcg_const_zeros_vec_matching(t);
2760    conj = tcg_temp_new_vec_matching(t);
2761
2762    /* Iterate over set bits from the least to the most significant bit */
2763    while (imm) {
2764        /*
2765         * Get the next bit to be processed with ctz64, then mirror it
2766         * (bit = 7 - ctz) to match PowerISA's big-endian bit numbering.
2767         */
2768        bit = 7 - ctz64(imm);
2769        if (bit & 0x4) {
2770            tcg_gen_mov_vec(conj, a);
2771        } else {
2772            tcg_gen_not_vec(vece, conj, a);
2773        }
2774        if (bit & 0x2) {
2775            tcg_gen_and_vec(vece, conj, conj, b);
2776        } else {
2777            tcg_gen_andc_vec(vece, conj, conj, b);
2778        }
2779        if (bit & 0x1) {
2780            tcg_gen_and_vec(vece, conj, conj, c);
2781        } else {
2782            tcg_gen_andc_vec(vece, conj, conj, c);
2783        }
2784        tcg_gen_or_vec(vece, disj, disj, conj);
2785
2786        /* Unset the least significant bit that is set */
2787        imm &= imm - 1;
2788    }
2789
2790    tcg_gen_mov_vec(t, disj);
2791
2792    tcg_temp_free_vec(disj);
2793    tcg_temp_free_vec(conj);
2794}
2795
2796static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
2797{
2798    static const TCGOpcode vecop_list[] = {
2799        INDEX_op_andc_vec, 0
2800    };
2801    static const GVecGen4i op = {
2802        .fniv = gen_xxeval_vec,
2803        .fno = gen_helper_XXEVAL,
2804        .fni8 = gen_xxeval_i64,
2805        .opt_opc = vecop_list,
2806        .vece = MO_64
2807    };
2808    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
2809        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);
2810
2811    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2812    REQUIRE_VSX(ctx);
2813
2814    /* Equivalent functions that can be implemented with a single gen_gvec */
2815    switch (a->imm) {
2816    case 0b00000000: /* false */
2817        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
2818        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
2819        break;
2820    case 0b00000011: /* and(B,A) */
2821        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
2822        break;
2823    case 0b00000101: /* and(C,A) */
2824        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
2825        break;
2826    case 0b00001111: /* A */
2827        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
2828        break;
2829    case 0b00010001: /* and(C,B) */
2830        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
2831        break;
2832    case 0b00011011: /* C?B:A */
2833        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
2834        break;
2835    case 0b00011101: /* B?C:A */
2836        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
2837        break;
2838    case 0b00100111: /* C?A:B */
2839        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
2840        break;
2841    case 0b00110011: /* B */
2842        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
2843        break;
2844    case 0b00110101: /* A?C:B */
2845        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
2846        break;
2847    case 0b00111100: /* xor(B,A) */
2848        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
2849        break;
2850    case 0b00111111: /* or(B,A) */
2851        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
2852        break;
2853    case 0b01000111: /* B?A:C */
2854        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
2855        break;
2856    case 0b01010011: /* A?B:C */
2857        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
2858        break;
2859    case 0b01010101: /* C */
2860        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
2861        break;
2862    case 0b01011010: /* xor(C,A) */
2863        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
2864        break;
2865    case 0b01011111: /* or(C,A) */
2866        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
2867        break;
2868    case 0b01100110: /* xor(C,B) */
2869        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
2870        break;
2871    case 0b01110111: /* or(C,B) */
2872        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
2873        break;
2874    case 0b10001000: /* nor(C,B) */
2875        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
2876        break;
2877    case 0b10011001: /* eqv(C,B) */
2878        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
2879        break;
2880    case 0b10100000: /* nor(C,A) */
2881        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
2882        break;
2883    case 0b10100101: /* eqv(C,A) */
2884        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
2885        break;
2886    case 0b10101010: /* not(C) */
2887        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
2888        break;
2889    case 0b11000000: /* nor(B,A) */
2890        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
2891        break;
2892    case 0b11000011: /* eqv(B,A) */
2893        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
2894        break;
2895    case 0b11001100: /* not(B) */
2896        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
2897        break;
2898    case 0b11101110: /* nand(C,B) */
2899        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
2900        break;
2901    case 0b11110000: /* not(A) */
2902        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
2903        break;
2904    case 0b11111010: /* nand(C,A) */
2905        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
2906        break;
2907    case 0b11111100: /* nand(B,A) */
2908        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
2909        break;
2910    case 0b11111111: /* true */
2911        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
2912        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
2913        break;
2914    default:
2915        /* Fallback to compute all conjunctions/disjunctions */
2916        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
2917    }
2918
2919    return true;
2920}
2921
2922static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2923                             TCGv_vec c)
2924{
2925    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
2926    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
2927    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
2928    tcg_temp_free_vec(tmp);
2929}
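/*
 * The sari/bitsel pair broadcasts each element's sign bit across the
 * element (arithmetic shift by width - 1), so the blend selects b where
 * the corresponding element of c is negative and a elsewhere.
 */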

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)

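/*
 * The quad-precision operands live in the VRs, which alias VSRs 32 to 63,
 * hence the gen_avr_ptr() addressing below.
 */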
static bool do_helper_X(arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr rt, ra, rb;

    rt = gen_avr_ptr(a->rt);
    ra = gen_avr_ptr(a->ra);
    rb = gen_avr_ptr(a->rb);

    helper(cpu_env, rt, ra, rb);

    tcg_temp_free_ptr(rt);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);

    return true;
}

static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    return do_helper_X(a, helper);
}

TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)

static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_XVCVSPBF16(cpu_env, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

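    /*
     * bfloat16 is the high half of an IEEE binary32, so widening the low
     * halfword of each word element to single precision is just a 16-bit
     * left shift.
     */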
    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                      16, 16, 16);

    return true;
}

/*
 * The PowerISA 3.1 mentions that for the current version of the
 * architecture, "the hardware implementation provides the effect of
 * ACC[i] and VSRs 4*i to 4*i + 3 logically containing the same data"
 * and "The Accumulators introduce no new logical state at this time"
 * (page 501). For now it seems unnecessary to create new structures,
 * so ACC[i] is the same as VSRs 4*i to 4*i + 3 and, therefore,
 * moves to and from the accumulators are no-ops.
 */
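/*
 * Under that aliasing, addressing ACC[i] reduces to addressing VSR 4*i.
 * A minimal sketch of what the offset helper used by gen_acc_ptr() boils
 * down to (the actual definition lives with the other offset helpers):
 *
 *     static inline int acc_full_offset(int i)
 *     {
 *         return vsr_full_offset(4 * i);
 *     }
 */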
static bool trans_XXMFACC(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    return true;
}

static bool trans_XXMTACC(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    return true;
}

static bool trans_XXSETACCZ(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
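    /* An accumulator aliases four 16-byte VSRs, so clear all 64 bytes. */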
    tcg_gen_gvec_dup_imm(MO_64, acc_full_offset(a->ra), 64, 64, 0);
    return true;
}

static bool do_ger(DisasContext *ctx, arg_MMIRR_XX3 *a,
    void (*helper)(TCGv_env, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_i32))
{
    uint32_t mask;
    TCGv_ptr xt, xa, xb;
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
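    /*
     * ACC[xt] aliases VSRs 4*xt to 4*xt + 3, so the instruction form is
     * invalid when either source VSR overlaps the target accumulator.
     */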
    if (unlikely((a->xa / 4 == a->xt) || (a->xb / 4 == a->xt))) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_acc_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

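    /* Pack the pmsk, ymsk and xmsk immediates into one word for the helper. */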
    mask = ger_pack_masks(a->pmsk, a->ymsk, a->xmsk);
    helper(cpu_env, xa, xb, xt, tcg_constant_i32(mask));
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    return true;
}

TRANS(XVI4GER8, do_ger, gen_helper_XVI4GER8)
TRANS(XVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
TRANS(XVI8GER4, do_ger, gen_helper_XVI8GER4)
TRANS(XVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
TRANS(XVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
TRANS(XVI16GER2, do_ger, gen_helper_XVI16GER2)
TRANS(XVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
TRANS(XVI16GER2S, do_ger, gen_helper_XVI16GER2S)
TRANS(XVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)

TRANS64(PMXVI4GER8, do_ger, gen_helper_XVI4GER8)
TRANS64(PMXVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
TRANS64(PMXVI8GER4, do_ger, gen_helper_XVI8GER4)
TRANS64(PMXVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
TRANS64(PMXVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
TRANS64(PMXVI16GER2, do_ger, gen_helper_XVI16GER2)
TRANS64(PMXVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
TRANS64(PMXVI16GER2S, do_ger, gen_helper_XVI16GER2S)
TRANS64(PMXVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)

TRANS(XVBF16GER2, do_ger, gen_helper_XVBF16GER2)
TRANS(XVBF16GER2PP, do_ger, gen_helper_XVBF16GER2PP)
TRANS(XVBF16GER2PN, do_ger, gen_helper_XVBF16GER2PN)
TRANS(XVBF16GER2NP, do_ger, gen_helper_XVBF16GER2NP)
TRANS(XVBF16GER2NN, do_ger, gen_helper_XVBF16GER2NN)

TRANS(XVF16GER2, do_ger, gen_helper_XVF16GER2)
TRANS(XVF16GER2PP, do_ger, gen_helper_XVF16GER2PP)
TRANS(XVF16GER2PN, do_ger, gen_helper_XVF16GER2PN)
TRANS(XVF16GER2NP, do_ger, gen_helper_XVF16GER2NP)
TRANS(XVF16GER2NN, do_ger, gen_helper_XVF16GER2NN)

TRANS(XVF32GER, do_ger, gen_helper_XVF32GER)
TRANS(XVF32GERPP, do_ger, gen_helper_XVF32GERPP)
TRANS(XVF32GERPN, do_ger, gen_helper_XVF32GERPN)
TRANS(XVF32GERNP, do_ger, gen_helper_XVF32GERNP)
TRANS(XVF32GERNN, do_ger, gen_helper_XVF32GERNN)

TRANS(XVF64GER, do_ger, gen_helper_XVF64GER)
TRANS(XVF64GERPP, do_ger, gen_helper_XVF64GERPP)
TRANS(XVF64GERPN, do_ger, gen_helper_XVF64GERPN)
TRANS(XVF64GERNP, do_ger, gen_helper_XVF64GERNP)
TRANS(XVF64GERNN, do_ger, gen_helper_XVF64GERNN)

TRANS64(PMXVBF16GER2, do_ger, gen_helper_XVBF16GER2)
TRANS64(PMXVBF16GER2PP, do_ger, gen_helper_XVBF16GER2PP)
TRANS64(PMXVBF16GER2PN, do_ger, gen_helper_XVBF16GER2PN)
TRANS64(PMXVBF16GER2NP, do_ger, gen_helper_XVBF16GER2NP)
TRANS64(PMXVBF16GER2NN, do_ger, gen_helper_XVBF16GER2NN)

TRANS64(PMXVF16GER2, do_ger, gen_helper_XVF16GER2)
TRANS64(PMXVF16GER2PP, do_ger, gen_helper_XVF16GER2PP)
TRANS64(PMXVF16GER2PN, do_ger, gen_helper_XVF16GER2PN)
TRANS64(PMXVF16GER2NP, do_ger, gen_helper_XVF16GER2NP)
TRANS64(PMXVF16GER2NN, do_ger, gen_helper_XVF16GER2NN)

TRANS64(PMXVF32GER, do_ger, gen_helper_XVF32GER)
TRANS64(PMXVF32GERPP, do_ger, gen_helper_XVF32GERPP)
TRANS64(PMXVF32GERPN, do_ger, gen_helper_XVF32GERPN)
TRANS64(PMXVF32GERNP, do_ger, gen_helper_XVF32GERNP)
TRANS64(PMXVF32GERNN, do_ger, gen_helper_XVF32GERNN)

TRANS64(PMXVF64GER, do_ger, gen_helper_XVF64GER)
TRANS64(PMXVF64GERPP, do_ger, gen_helper_XVF64GERPP)
TRANS64(PMXVF64GERPN, do_ger, gen_helper_XVF64GERPN)
TRANS64(PMXVF64GERNP, do_ger, gen_helper_XVF64GERNP)
TRANS64(PMXVF64GERNN, do_ger, gen_helper_XVF64GERNN)

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL