xref: /qemu/target/ppc/translate/vsx-impl.c.inc (revision b88651cb)
/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

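/*
 * VSX_LOAD_SCALAR generates an indexed scalar load: the loaded value goes
 * into the high doubleword of VSR[XT], while the low doubleword is left
 * architecturally undefined.
 */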
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

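/* lxvd2x: load two doublewords, in element order, into VSR[XT] */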
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

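/*
 * lxvw4x: load four words into VSR[XT]; in little-endian mode the two words
 * within each doubleword must be swapped back into element order.
 */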
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

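/* lxvwsx: load one word and splat it across all four word elements */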
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

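/* lxvdsx: load one doubleword and splat it into both doubleword elements */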
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

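/*
 * Byte-swap every 16-bit (gen_bswap16x8) or 32-bit (gen_bswap32x4) element
 * of a 128-bit value held as a high/low pair of i64 temporaries.
 */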
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
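
/* lxvh8x: load eight halfwords; swap each halfword when in LE mode */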
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

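/* lxvb16x: load sixteen bytes; byte order needs no adjustment in LE mode */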
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
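/*
 * lxvl/lxvll and stxvl/stxvll load or store a vector of a given length,
 * taken from the high-order byte of GPR[RB]; they are implemented entirely
 * in helpers.
 */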
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

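/*
 * VSX_STORE_SCALAR generates an indexed scalar store from the high
 * doubleword of VSR[XS].
 */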
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

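/* stxvd2x: store both doublewords of VSR[XS], in element order */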
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

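/* stxvw4x: store four words, swapping within each doubleword in LE mode */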
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

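/* stxvh8x: store eight halfwords; swap each halfword when in LE mode */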
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

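/* stxvb16x: store sixteen bytes; byte order needs no adjustment */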
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

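/*
 * Word-sized moves between GPRs and doubleword 0 of a VSR: mfvsrwz
 * zero-extends into the GPR; mtvsrwa sign-extends and mtvsrwz zero-extends
 * into the VSR.
 */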
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
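/* 64-bit only: doubleword moves between GPRs and VSRs */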
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

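/*
 * VSX_SCALAR_MOVE implements xsabsdp/xsnabsdp/xsnegdp/xscpsgndp by operating
 * on the sign bit of the high doubleword only; the low doubleword of the
 * target is cleared.
 */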
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

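/*
 * VSX_CMP generates a vector compare; when opcode bit 21 (Rc) is set, the
 * helper's all-true/all-false summary is also written to CR6.
 */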
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

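/* xscvqpdp: convert quad- to double-precision; a->rc requests round-to-odd */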
static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool do_helper_env_X_tb(DisasContext *ctx, arg_X_tb *a,
                               void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper(cpu_env, xt, xb);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCVUQQP, do_helper_env_X_tb, gen_helper_XSCVUQQP)
TRANS(XSCVSQQP, do_helper_env_X_tb, gen_helper_XSCVSQQP)
TRANS(XSCVQPUQZ, do_helper_env_X_tb, gen_helper_XSCVQPUQZ)
TRANS(XSCVQPSQZ, do_helper_env_X_tb, gen_helper_XSCVQPSQZ)

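/*
 * Boilerplate generators for helper-based VSX instructions. The suffix
 * encodes the operand pattern: the X variants take VSR numbers from the
 * XT/XA/XB fields, while the R variants address VSRs 32..63 via rD/rA/rB.
 */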
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

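/*
 * xxperm/xxpermr reuse the vperm/vpermr helpers; note that the target VSR
 * doubles as the second source operand.
 */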
static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

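/* xxpermdi: DM selects which doubleword of each source goes to the target */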
static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

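/*
 * xxgenpcv*: generate permute control vector; imm selects one of the four
 * big/little-endian expanded/compressed helper variants.
 */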
typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);

static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
                        const xxgenpcv_genfn fn[4])
{
    TCGv_ptr xt, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (a->imm & ~0x3) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_vsr_ptr(a->xt);
    vrb = gen_avr_ptr(a->vrb);

    fn[a->imm](xt, vrb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(vrb);

    return true;
}

#define XXGENPCV(NAME) \
    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)  \
    {                                                           \
        static const xxgenpcv_genfn fn[4] = {                   \
            gen_helper_##NAME##_be_exp,                         \
            gen_helper_##NAME##_be_comp,                        \
            gen_helper_##NAME##_le_exp,                         \
            gen_helper_##NAME##_le_comp,                        \
        };                                                      \
        return do_xxgenpcv(ctx, a, fn);                         \
    }

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

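/*
 * Scalar fused multiply-add: the A-form ("type_a") reuses the target as the
 * second multiplicand, while the M-form reuses it as the addend.
 */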
1273static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
1274        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1275{
1276    TCGv_ptr t, s1, s2, s3;
1277
1278    t = gen_vsr_ptr(tgt);
1279    s1 = gen_vsr_ptr(src1);
1280    s2 = gen_vsr_ptr(src2);
1281    s3 = gen_vsr_ptr(src3);
1282
1283    gen_helper(cpu_env, t, s1, s2, s3);
1284
1285    tcg_temp_free_ptr(t);
1286    tcg_temp_free_ptr(s1);
1287    tcg_temp_free_ptr(s2);
1288    tcg_temp_free_ptr(s3);
1289
1290    return true;
1291}
1292
1293static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
1294        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1295{
1296    REQUIRE_VSX(ctx);
1297
1298    if (type_a) {
1299        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
1300    }
1301    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
1302}
1303
1304TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
1305TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
1306TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
1307TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
1308TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
1309TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
1310TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
1311TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
1312TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
1313TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
1314TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
1315TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
1316TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
1317TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
1318TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
1319TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)
1320
1321static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
1322        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
1323        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1324{
1325    int vrt, vra, vrb;
1326
1327    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1328    REQUIRE_VSX(ctx);
1329
1330    vrt = a->rt + 32;
1331    vra = a->ra + 32;
1332    vrb = a->rb + 32;
1333
1334    if (a->rc) {
1335        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
1336    }
1337
1338    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
1339}
1340
1341TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
1342TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
1343TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
1344TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)
1345
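/*
 * Legacy (non-decodetree) expansion of the vector multiply-add family.
 * Opcode bit 25 distinguishes the two operand orders: set for the
 * "A x T + B" forms, clear for the "A x B + T" forms, as handled below.
 */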
1346#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
1347static void gen_##name(DisasContext *ctx)                                     \
1348{                                                                             \
1349    TCGv_ptr xt, s1, s2, s3;                                                  \
1350    if (unlikely(!ctx->vsx_enabled)) {                                        \
1351        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
1352        return;                                                               \
1353    }                                                                         \
1354    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
1355    s1 = gen_vsr_ptr(xA(ctx->opcode));                                        \
1356    if (ctx->opcode & PPC_BIT32(25)) {                                        \
1357        /*                                                                    \
1358         * AxT + B                                                            \
1359         */                                                                   \
1360        s2 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1361        s3 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1362    } else {                                                                  \
1363        /*                                                                    \
1364         * AxB + T                                                            \
1365         */                                                                   \
1366        s2 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1367        s3 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1368    }                                                                         \
1369    gen_helper_##name(cpu_env, xt, s1, s2, s3);                               \
1370    tcg_temp_free_ptr(xt);                                                    \
1371    tcg_temp_free_ptr(s1);                                                    \
1372    tcg_temp_free_ptr(s2);                                                    \
1373    tcg_temp_free_ptr(s3);                                                    \
1374}
1375
1376GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1377GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1378GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1379GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1380GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1381GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1382GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1383GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1384
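/*
 * VSX byte-reverse instructions: xxbrd swaps the bytes within each
 * doubleword, xxbrh within each halfword, xxbrw within each word, and
 * xxbrq reverses the full 16-byte quadword (byte-swap both doublewords,
 * then exchange them).
 */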
1385static void gen_xxbrd(DisasContext *ctx)
1386{
1387    TCGv_i64 xth;
1388    TCGv_i64 xtl;
1389    TCGv_i64 xbh;
1390    TCGv_i64 xbl;
1391
1392    if (unlikely(!ctx->vsx_enabled)) {
1393        gen_exception(ctx, POWERPC_EXCP_VSXU);
1394        return;
1395    }
1396    xth = tcg_temp_new_i64();
1397    xtl = tcg_temp_new_i64();
1398    xbh = tcg_temp_new_i64();
1399    xbl = tcg_temp_new_i64();
1400    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1401    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1402
1403    tcg_gen_bswap64_i64(xth, xbh);
1404    tcg_gen_bswap64_i64(xtl, xbl);
1405    set_cpu_vsr(xT(ctx->opcode), xth, true);
1406    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1407
1408    tcg_temp_free_i64(xth);
1409    tcg_temp_free_i64(xtl);
1410    tcg_temp_free_i64(xbh);
1411    tcg_temp_free_i64(xbl);
1412}
1413
1414static void gen_xxbrh(DisasContext *ctx)
1415{
1416    TCGv_i64 xth;
1417    TCGv_i64 xtl;
1418    TCGv_i64 xbh;
1419    TCGv_i64 xbl;
1420
1421    if (unlikely(!ctx->vsx_enabled)) {
1422        gen_exception(ctx, POWERPC_EXCP_VSXU);
1423        return;
1424    }
1425    xth = tcg_temp_new_i64();
1426    xtl = tcg_temp_new_i64();
1427    xbh = tcg_temp_new_i64();
1428    xbl = tcg_temp_new_i64();
1429    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1430    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1431
1432    gen_bswap16x8(xth, xtl, xbh, xbl);
1433    set_cpu_vsr(xT(ctx->opcode), xth, true);
1434    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1435
1436    tcg_temp_free_i64(xth);
1437    tcg_temp_free_i64(xtl);
1438    tcg_temp_free_i64(xbh);
1439    tcg_temp_free_i64(xbl);
1440}
1441
1442static void gen_xxbrq(DisasContext *ctx)
1443{
1444    TCGv_i64 xth;
1445    TCGv_i64 xtl;
1446    TCGv_i64 xbh;
1447    TCGv_i64 xbl;
1448    TCGv_i64 t0;
1449
1450    if (unlikely(!ctx->vsx_enabled)) {
1451        gen_exception(ctx, POWERPC_EXCP_VSXU);
1452        return;
1453    }
1454    xth = tcg_temp_new_i64();
1455    xtl = tcg_temp_new_i64();
1456    xbh = tcg_temp_new_i64();
1457    xbl = tcg_temp_new_i64();
1458    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1459    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1460    t0 = tcg_temp_new_i64();
1461
1462    tcg_gen_bswap64_i64(t0, xbl);
1463    tcg_gen_bswap64_i64(xtl, xbh);
1464    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1465    tcg_gen_mov_i64(xth, t0);
1466    set_cpu_vsr(xT(ctx->opcode), xth, true);
1467
1468    tcg_temp_free_i64(t0);
1469    tcg_temp_free_i64(xth);
1470    tcg_temp_free_i64(xtl);
1471    tcg_temp_free_i64(xbh);
1472    tcg_temp_free_i64(xbl);
1473}
1474
1475static void gen_xxbrw(DisasContext *ctx)
1476{
1477    TCGv_i64 xth;
1478    TCGv_i64 xtl;
1479    TCGv_i64 xbh;
1480    TCGv_i64 xbl;
1481
1482    if (unlikely(!ctx->vsx_enabled)) {
1483        gen_exception(ctx, POWERPC_EXCP_VSXU);
1484        return;
1485    }
1486    xth = tcg_temp_new_i64();
1487    xtl = tcg_temp_new_i64();
1488    xbh = tcg_temp_new_i64();
1489    xbl = tcg_temp_new_i64();
1490    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1491    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1492
1493    gen_bswap32x4(xth, xtl, xbh, xbl);
1494    set_cpu_vsr(xT(ctx->opcode), xth, true);
1495    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1496
1497    tcg_temp_free_i64(xth);
1498    tcg_temp_free_i64(xtl);
1499    tcg_temp_free_i64(xbh);
1500    tcg_temp_free_i64(xbl);
1501}
1502
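/*
 * The 128-bit logical operations are expanded inline with gvec over the
 * full 16-byte register, so no helper call is needed.
 */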
1503#define VSX_LOGICAL(name, vece, tcg_op)                              \
1504static void glue(gen_, name)(DisasContext *ctx)                      \
1505    {                                                                \
1506        if (unlikely(!ctx->vsx_enabled)) {                           \
1507            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
1508            return;                                                  \
1509        }                                                            \
1510        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
1511               vsr_full_offset(xA(ctx->opcode)),                     \
1512               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
1513    }
1514
1515VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
1516VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
1517VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
1518VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
1519VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
1520VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
1521VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
1522VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
1523
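/*
 * Merge Word: interleave the words of one half of each source, e.g.
 * xxmrghw produces XT = { XA.w[0], XB.w[0], XA.w[1], XB.w[1] }.  Each
 * source doubleword is read twice: the shifted copy supplies the even
 * word of the result, the unshifted copy the odd word.
 */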
1524#define VSX_XXMRG(name, high)                               \
1525static void glue(gen_, name)(DisasContext *ctx)             \
1526    {                                                       \
1527        TCGv_i64 a0, a1, b0, b1, tmp;                       \
1528        if (unlikely(!ctx->vsx_enabled)) {                  \
1529            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
1530            return;                                         \
1531        }                                                   \
1532        a0 = tcg_temp_new_i64();                            \
1533        a1 = tcg_temp_new_i64();                            \
1534        b0 = tcg_temp_new_i64();                            \
1535        b1 = tcg_temp_new_i64();                            \
1536        tmp = tcg_temp_new_i64();                           \
1537        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
1538        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
1539        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
1540        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
1541        tcg_gen_shri_i64(a0, a0, 32);                       \
1542        tcg_gen_shri_i64(b0, b0, 32);                       \
1543        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
1544        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
1545        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
1546        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
1547        tcg_temp_free_i64(a0);                              \
1548        tcg_temp_free_i64(a1);                              \
1549        tcg_temp_free_i64(b0);                              \
1550        tcg_temp_free_i64(b1);                              \
1551        tcg_temp_free_i64(tmp);                             \
1552    }
1553
1554VSX_XXMRG(xxmrghw, 1)
1555VSX_XXMRG(xxmrglw, 0)
1556
1557static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
1558{
1559    REQUIRE_INSNS_FLAGS2(ctx, VSX);
1560    REQUIRE_VSX(ctx);
1561
1562    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
1563                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);
1564
1565    return true;
1566}
1567
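/*
 * Splat Word: bofs is the host byte offset of word element UIM of XB.
 * The VSR is stored as host-endian doublewords, so on a little-endian
 * host both the doubleword (8) and the word-within-doubleword (4)
 * components of the offset must be flipped to reach the same
 * architectural element.
 */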
1568static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim2 *a)
1569{
1570    int tofs, bofs;
1571
1572    REQUIRE_VSX(ctx);
1573
1574    tofs = vsr_full_offset(a->xt);
1575    bofs = vsr_full_offset(a->xb);
1576    bofs += a->uim << MO_32;
1577#if !HOST_BIG_ENDIAN
1578    bofs ^= 8 | 4;
1579#endif
1580
1581    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
1582    return true;
1583}
1584
1585#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1586
1587static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
1588{
1589    if (a->xt < 32) {
1590        REQUIRE_VSX(ctx);
1591    } else {
1592        REQUIRE_VECTOR(ctx);
1593    }
1594    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
1595    return true;
1596}
1597
1598static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
1599{
1600    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1601    REQUIRE_VSX(ctx);
1602
1603    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);
1604
1605    return true;
1606}
1607
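/*
 * Vector Splat Immediate Double-Precision: the immediate is a
 * single-precision image, converted to double precision at translation
 * time (helper_todouble) since it is a constant.
 */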
1608static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
1609{
1610    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1611    REQUIRE_VSX(ctx);
1612
1613    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
1614                         helper_todouble(a->si));
1615    return true;
1616}
1617
1618static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
1619{
1620    TCGv_i32 imm;
1621
1622    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1623    REQUIRE_VSX(ctx);
1624
1625    imm = tcg_constant_i32(a->si);
1626
1627    tcg_gen_st_i32(imm, cpu_env,
1628        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
1629    tcg_gen_st_i32(imm, cpu_env,
1630        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));
1631
1632    return true;
1633}
1634
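/*
 * Load VSX Vector Special Value Quadword: UIM indexes a table of
 * quad-precision constants whose significand bits are all zero, so only
 * the high doubleword needs encoding; the low doubleword of the result
 * is always zero.  Encodings left at zero in values[] are reserved and
 * are treated as invalid.
 */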
1635static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
1636{
1637    static const uint64_t values[32] = {
1638        0, /* Unspecified */
1639        0x3FFF000000000000llu, /* QP +1.0 */
1640        0x4000000000000000llu, /* QP +2.0 */
1641        0x4000800000000000llu, /* QP +3.0 */
1642        0x4001000000000000llu, /* QP +4.0 */
1643        0x4001400000000000llu, /* QP +5.0 */
1644        0x4001800000000000llu, /* QP +6.0 */
1645        0x4001C00000000000llu, /* QP +7.0 */
1646        0x7FFF000000000000llu, /* QP +Inf */
1647        0x7FFF800000000000llu, /* QP dQNaN */
1648        0, /* Unspecified */
1649        0, /* Unspecified */
1650        0, /* Unspecified */
1651        0, /* Unspecified */
1652        0, /* Unspecified */
1653        0, /* Unspecified */
1654        0x8000000000000000llu, /* QP -0.0 */
1655        0xBFFF000000000000llu, /* QP -1.0 */
1656        0xC000000000000000llu, /* QP -2.0 */
1657        0xC000800000000000llu, /* QP -3.0 */
1658        0xC001000000000000llu, /* QP -4.0 */
1659        0xC001400000000000llu, /* QP -5.0 */
1660        0xC001800000000000llu, /* QP -6.0 */
1661        0xC001C00000000000llu, /* QP -7.0 */
1662        0xFFFF000000000000llu, /* QP -Inf */
1663    };
1664
1665    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1666    REQUIRE_VSX(ctx);
1667
1668    if (values[a->uim]) {
1669        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
1670        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
1671    } else {
1672        gen_invalid(ctx);
1673    }
1674
1675    return true;
1676}
1677
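/*
 * Test LSB of each Byte: CR[BF] is set to 0b1000 (LT) when every byte of
 * VSR[XB] has its least significant bit set, 0b0010 (EQ) when no byte
 * does, and 0b0000 otherwise, hence the shifts by 3 and 1 below.
 */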
1678static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
1679{
1680    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;
1681
1682    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1683    REQUIRE_VSX(ctx);
1684
1685    xb = tcg_temp_new_i64();
1686    t0 = tcg_temp_new_i64();
1687    t1 = tcg_temp_new_i64();
1688    all_true = tcg_temp_new_i64();
1689    all_false = tcg_temp_new_i64();
1690    mask = tcg_constant_i64(dup_const(MO_8, 1));
1691    zero = tcg_constant_i64(0);
1692
1693    get_cpu_vsr(xb, a->xb, true);
1694    tcg_gen_and_i64(t0, mask, xb);
1695    get_cpu_vsr(xb, a->xb, false);
1696    tcg_gen_and_i64(t1, mask, xb);
1697
1698    tcg_gen_or_i64(all_false, t0, t1);
1699    tcg_gen_and_i64(all_true, t0, t1);
1700
1701    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
1702    tcg_gen_shli_i64(all_false, all_false, 1);
1703    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
1704    tcg_gen_shli_i64(all_true, all_true, 3);
1705
1706    tcg_gen_or_i64(t0, all_false, all_true);
1707    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);
1708
1709    tcg_temp_free_i64(xb);
1710    tcg_temp_free_i64(t0);
1711    tcg_temp_free_i64(t1);
1712    tcg_temp_free_i64(all_true);
1713    tcg_temp_free_i64(all_false);
1714
1715    return true;
1716}
1717
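/*
 * Shift Left Double by Word Immediate: the result is the high 128 bits
 * of (XA || XB) << (SHW * 32).  SHW=0 copies XA and SHW=2 yields
 * { XA.lo, XB.hi }; the odd shift counts need the shift/or sequences
 * below to stitch words across doubleword boundaries.
 */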
1718static void gen_xxsldwi(DisasContext *ctx)
1719{
1720    TCGv_i64 xth, xtl;
1721    if (unlikely(!ctx->vsx_enabled)) {
1722        gen_exception(ctx, POWERPC_EXCP_VSXU);
1723        return;
1724    }
1725    xth = tcg_temp_new_i64();
1726    xtl = tcg_temp_new_i64();
1727
1728    switch (SHW(ctx->opcode)) {
1729        case 0: {
1730            get_cpu_vsr(xth, xA(ctx->opcode), true);
1731            get_cpu_vsr(xtl, xA(ctx->opcode), false);
1732            break;
1733        }
1734        case 1: {
1735            TCGv_i64 t0 = tcg_temp_new_i64();
1736            get_cpu_vsr(xth, xA(ctx->opcode), true);
1737            tcg_gen_shli_i64(xth, xth, 32);
1738            get_cpu_vsr(t0, xA(ctx->opcode), false);
1739            tcg_gen_shri_i64(t0, t0, 32);
1740            tcg_gen_or_i64(xth, xth, t0);
1741            get_cpu_vsr(xtl, xA(ctx->opcode), false);
1742            tcg_gen_shli_i64(xtl, xtl, 32);
1743            get_cpu_vsr(t0, xB(ctx->opcode), true);
1744            tcg_gen_shri_i64(t0, t0, 32);
1745            tcg_gen_or_i64(xtl, xtl, t0);
1746            tcg_temp_free_i64(t0);
1747            break;
1748        }
1749        case 2: {
1750            get_cpu_vsr(xth, xA(ctx->opcode), false);
1751            get_cpu_vsr(xtl, xB(ctx->opcode), true);
1752            break;
1753        }
1754        case 3: {
1755            TCGv_i64 t0 = tcg_temp_new_i64();
1756            get_cpu_vsr(xth, xA(ctx->opcode), false);
1757            tcg_gen_shli_i64(xth, xth, 32);
1758            get_cpu_vsr(t0, xB(ctx->opcode), true);
1759            tcg_gen_shri_i64(t0, t0, 32);
1760            tcg_gen_or_i64(xth, xth, t0);
1761            get_cpu_vsr(xtl, xB(ctx->opcode), true);
1762            tcg_gen_shli_i64(xtl, xtl, 32);
1763            get_cpu_vsr(t0, xB(ctx->opcode), false);
1764            tcg_gen_shri_i64(t0, t0, 32);
1765            tcg_gen_or_i64(xtl, xtl, t0);
1766            tcg_temp_free_i64(t0);
1767            break;
1768        }
1769    }
1770
1771    set_cpu_vsr(xT(ctx->opcode), xth, true);
1772    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1773
1774    tcg_temp_free_i64(xth);
1775    tcg_temp_free_i64(xtl);
1776}
1777
1778#define VSX_EXTRACT_INSERT(name)                                \
1779static void gen_##name(DisasContext *ctx)                       \
1780{                                                               \
1781    TCGv_ptr xt, xb;                                            \
1782    TCGv_i32 t0;                                                \
1783    TCGv_i64 t1;                                                \
1784    uint8_t uimm = UIMM4(ctx->opcode);                          \
1785                                                                \
1786    if (unlikely(!ctx->vsx_enabled)) {                          \
1787        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
1788        return;                                                 \
1789    }                                                           \
1790    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
1791    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
1792    t0 = tcg_temp_new_i32();                                    \
1793    t1 = tcg_temp_new_i64();                                    \
1794    /*                                                          \
1795     * uimm > 15 is out of bounds: zero the target below.       \
1796     * 12 < uimm <= 15 is handled as hardware does, in the helper. \
1797     */                                                         \
1798    if (uimm > 15) {                                            \
1799        tcg_gen_movi_i64(t1, 0);                                \
1800        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
1801        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
1802        return;                                                 \
1803    }                                                           \
1804    tcg_gen_movi_i32(t0, uimm);                                 \
1805    gen_helper_##name(cpu_env, xt, xb, t0);                     \
1806    tcg_temp_free_ptr(xb);                                      \
1807    tcg_temp_free_ptr(xt);                                      \
1808    tcg_temp_free_i32(t0);                                      \
1809    tcg_temp_free_i64(t1);                                      \
1810}
1811
1812VSX_EXTRACT_INSERT(xxextractuw)
1813VSX_EXTRACT_INSERT(xxinsertw)
1814
1815#ifdef TARGET_PPC64
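/*
 * Scalar exponent extract/insert.  A double-precision exponent is the
 * 11-bit field at bits 62:52; a quad-precision exponent is the 15-bit
 * field at bits 62:48 of the high doubleword.  Hence the extract and
 * deposit positions used below.
 */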
1816static void gen_xsxexpdp(DisasContext *ctx)
1817{
1818    TCGv rt = cpu_gpr[rD(ctx->opcode)];
1819    TCGv_i64 t0;
1820    if (unlikely(!ctx->vsx_enabled)) {
1821        gen_exception(ctx, POWERPC_EXCP_VSXU);
1822        return;
1823    }
1824    t0 = tcg_temp_new_i64();
1825    get_cpu_vsr(t0, xB(ctx->opcode), true);
1826    tcg_gen_extract_i64(rt, t0, 52, 11);
1827    tcg_temp_free_i64(t0);
1828}
1829
1830static void gen_xsxexpqp(DisasContext *ctx)
1831{
1832    TCGv_i64 xth;
1833    TCGv_i64 xtl;
1834    TCGv_i64 xbh;
1835
1836    if (unlikely(!ctx->vsx_enabled)) {
1837        gen_exception(ctx, POWERPC_EXCP_VSXU);
1838        return;
1839    }
1840    xth = tcg_temp_new_i64();
1841    xtl = tcg_temp_new_i64();
1842    xbh = tcg_temp_new_i64();
1843    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1844
1845    tcg_gen_extract_i64(xth, xbh, 48, 15);
1846    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1847    tcg_gen_movi_i64(xtl, 0);
1848    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1849
1850    tcg_temp_free_i64(xbh);
1851    tcg_temp_free_i64(xth);
1852    tcg_temp_free_i64(xtl);
1853}
1854
1855static void gen_xsiexpdp(DisasContext *ctx)
1856{
1857    TCGv_i64 xth;
1858    TCGv ra = cpu_gpr[rA(ctx->opcode)];
1859    TCGv rb = cpu_gpr[rB(ctx->opcode)];
1860    TCGv_i64 t0;
1861
1862    if (unlikely(!ctx->vsx_enabled)) {
1863        gen_exception(ctx, POWERPC_EXCP_VSXU);
1864        return;
1865    }
1866    t0 = tcg_temp_new_i64();
1867    xth = tcg_temp_new_i64();
1868    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1869    tcg_gen_andi_i64(t0, rb, 0x7FF);
1870    tcg_gen_shli_i64(t0, t0, 52);
1871    tcg_gen_or_i64(xth, xth, t0);
1872    set_cpu_vsr(xT(ctx->opcode), xth, true);
1873    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
1874    tcg_temp_free_i64(t0);
1875    tcg_temp_free_i64(xth);
1876}
1877
1878static void gen_xsiexpqp(DisasContext *ctx)
1879{
1880    TCGv_i64 xth;
1881    TCGv_i64 xtl;
1882    TCGv_i64 xah;
1883    TCGv_i64 xal;
1884    TCGv_i64 xbh;
1885    TCGv_i64 t0;
1886
1887    if (unlikely(!ctx->vsx_enabled)) {
1888        gen_exception(ctx, POWERPC_EXCP_VSXU);
1889        return;
1890    }
1891    xth = tcg_temp_new_i64();
1892    xtl = tcg_temp_new_i64();
1893    xah = tcg_temp_new_i64();
1894    xal = tcg_temp_new_i64();
1895    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
1896    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
1897    xbh = tcg_temp_new_i64();
1898    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1899    t0 = tcg_temp_new_i64();
1900
1901    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1902    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1903    tcg_gen_shli_i64(t0, t0, 48);
1904    tcg_gen_or_i64(xth, xth, t0);
1905    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1906    tcg_gen_mov_i64(xtl, xal);
1907    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1908
1909    tcg_temp_free_i64(t0);
1910    tcg_temp_free_i64(xth);
1911    tcg_temp_free_i64(xtl);
1912    tcg_temp_free_i64(xah);
1913    tcg_temp_free_i64(xal);
1914    tcg_temp_free_i64(xbh);
1915}
1916
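/*
 * Significand extraction must supply the implicit integer bit: it is 1
 * for normal numbers, but 0 when the biased exponent is 0 (zero or
 * denormal) or all-ones (infinity or NaN).  The two movconds below clear
 * the pre-set implicit bit in those cases.
 */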
1917static void gen_xsxsigdp(DisasContext *ctx)
1918{
1919    TCGv rt = cpu_gpr[rD(ctx->opcode)];
1920    TCGv_i64 t0, t1, zr, nan, exp;
1921
1922    if (unlikely(!ctx->vsx_enabled)) {
1923        gen_exception(ctx, POWERPC_EXCP_VSXU);
1924        return;
1925    }
1926    exp = tcg_temp_new_i64();
1927    t0 = tcg_temp_new_i64();
1928    t1 = tcg_temp_new_i64();
1929    zr = tcg_const_i64(0);
1930    nan = tcg_const_i64(2047);
1931
1932    get_cpu_vsr(t1, xB(ctx->opcode), true);
1933    tcg_gen_extract_i64(exp, t1, 52, 11);
1934    tcg_gen_movi_i64(t0, 0x0010000000000000);
1935    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1936    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1937    get_cpu_vsr(t1, xB(ctx->opcode), true);
1938    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
1939
1940    tcg_temp_free_i64(t0);
1941    tcg_temp_free_i64(t1);
1942    tcg_temp_free_i64(exp);
1943    tcg_temp_free_i64(zr);
1944    tcg_temp_free_i64(nan);
1945}
1946
1947static void gen_xsxsigqp(DisasContext *ctx)
1948{
1949    TCGv_i64 t0, zr, nan, exp;
1950    TCGv_i64 xth;
1951    TCGv_i64 xtl;
1952    TCGv_i64 xbh;
1953    TCGv_i64 xbl;
1954
1955    if (unlikely(!ctx->vsx_enabled)) {
1956        gen_exception(ctx, POWERPC_EXCP_VSXU);
1957        return;
1958    }
1959    xth = tcg_temp_new_i64();
1960    xtl = tcg_temp_new_i64();
1961    xbh = tcg_temp_new_i64();
1962    xbl = tcg_temp_new_i64();
1963    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1964    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
1965    exp = tcg_temp_new_i64();
1966    t0 = tcg_temp_new_i64();
1967    zr = tcg_const_i64(0);
1968    nan = tcg_const_i64(32767);
1969
1970    tcg_gen_extract_i64(exp, xbh, 48, 15);
1971    tcg_gen_movi_i64(t0, 0x0001000000000000);
1972    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1973    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1974    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
1975    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1976    tcg_gen_mov_i64(xtl, xbl);
1977    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1978
1979    tcg_temp_free_i64(t0);
1980    tcg_temp_free_i64(exp);
1981    tcg_temp_free_i64(zr);
1982    tcg_temp_free_i64(nan);
1983    tcg_temp_free_i64(xth);
1984    tcg_temp_free_i64(xtl);
1985    tcg_temp_free_i64(xbh);
1986    tcg_temp_free_i64(xbl);
1987}
1988#endif
1989
1990static void gen_xviexpsp(DisasContext *ctx)
1991{
1992    TCGv_i64 xth;
1993    TCGv_i64 xtl;
1994    TCGv_i64 xah;
1995    TCGv_i64 xal;
1996    TCGv_i64 xbh;
1997    TCGv_i64 xbl;
1998    TCGv_i64 t0;
1999
2000    if (unlikely(!ctx->vsx_enabled)) {
2001        gen_exception(ctx, POWERPC_EXCP_VSXU);
2002        return;
2003    }
2004    xth = tcg_temp_new_i64();
2005    xtl = tcg_temp_new_i64();
2006    xah = tcg_temp_new_i64();
2007    xal = tcg_temp_new_i64();
2008    xbh = tcg_temp_new_i64();
2009    xbl = tcg_temp_new_i64();
2010    get_cpu_vsr(xah, xA(ctx->opcode), true);
2011    get_cpu_vsr(xal, xA(ctx->opcode), false);
2012    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2013    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2014    t0 = tcg_temp_new_i64();
2015
2016    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
2017    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
2018    tcg_gen_shli_i64(t0, t0, 23);
2019    tcg_gen_or_i64(xth, xth, t0);
2020    set_cpu_vsr(xT(ctx->opcode), xth, true);
2021    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
2022    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
2023    tcg_gen_shli_i64(t0, t0, 23);
2024    tcg_gen_or_i64(xtl, xtl, t0);
2025    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2026
2027    tcg_temp_free_i64(t0);
2028    tcg_temp_free_i64(xth);
2029    tcg_temp_free_i64(xtl);
2030    tcg_temp_free_i64(xah);
2031    tcg_temp_free_i64(xal);
2032    tcg_temp_free_i64(xbh);
2033    tcg_temp_free_i64(xbl);
2034}
2035
2036static void gen_xviexpdp(DisasContext *ctx)
2037{
2038    TCGv_i64 xth;
2039    TCGv_i64 xtl;
2040    TCGv_i64 xah;
2041    TCGv_i64 xal;
2042    TCGv_i64 xbh;
2043    TCGv_i64 xbl;
2044
2045    if (unlikely(!ctx->vsx_enabled)) {
2046        gen_exception(ctx, POWERPC_EXCP_VSXU);
2047        return;
2048    }
2049    xth = tcg_temp_new_i64();
2050    xtl = tcg_temp_new_i64();
2051    xah = tcg_temp_new_i64();
2052    xal = tcg_temp_new_i64();
2053    xbh = tcg_temp_new_i64();
2054    xbl = tcg_temp_new_i64();
2055    get_cpu_vsr(xah, xA(ctx->opcode), true);
2056    get_cpu_vsr(xal, xA(ctx->opcode), false);
2057    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2058    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2059
2060    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
2061    set_cpu_vsr(xT(ctx->opcode), xth, true);
2062
2063    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
2064    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2065
2066    tcg_temp_free_i64(xth);
2067    tcg_temp_free_i64(xtl);
2068    tcg_temp_free_i64(xah);
2069    tcg_temp_free_i64(xal);
2070    tcg_temp_free_i64(xbh);
2071    tcg_temp_free_i64(xbl);
2072}
2073
2074static void gen_xvxexpsp(DisasContext *ctx)
2075{
2076    TCGv_i64 xth;
2077    TCGv_i64 xtl;
2078    TCGv_i64 xbh;
2079    TCGv_i64 xbl;
2080
2081    if (unlikely(!ctx->vsx_enabled)) {
2082        gen_exception(ctx, POWERPC_EXCP_VSXU);
2083        return;
2084    }
2085    xth = tcg_temp_new_i64();
2086    xtl = tcg_temp_new_i64();
2087    xbh = tcg_temp_new_i64();
2088    xbl = tcg_temp_new_i64();
2089    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2090    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2091
2092    tcg_gen_shri_i64(xth, xbh, 23);
2093    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
2094    set_cpu_vsr(xT(ctx->opcode), xth, true);
2095    tcg_gen_shri_i64(xtl, xbl, 23);
2096    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
2097    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2098
2099    tcg_temp_free_i64(xth);
2100    tcg_temp_free_i64(xtl);
2101    tcg_temp_free_i64(xbh);
2102    tcg_temp_free_i64(xbl);
2103}
2104
2105static void gen_xvxexpdp(DisasContext *ctx)
2106{
2107    TCGv_i64 xth;
2108    TCGv_i64 xtl;
2109    TCGv_i64 xbh;
2110    TCGv_i64 xbl;
2111
2112    if (unlikely(!ctx->vsx_enabled)) {
2113        gen_exception(ctx, POWERPC_EXCP_VSXU);
2114        return;
2115    }
2116    xth = tcg_temp_new_i64();
2117    xtl = tcg_temp_new_i64();
2118    xbh = tcg_temp_new_i64();
2119    xbl = tcg_temp_new_i64();
2120    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2121    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2122
2123    tcg_gen_extract_i64(xth, xbh, 52, 11);
2124    set_cpu_vsr(xT(ctx->opcode), xth, true);
2125    tcg_gen_extract_i64(xtl, xbl, 52, 11);
2126    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2127
2128    tcg_temp_free_i64(xth);
2129    tcg_temp_free_i64(xtl);
2130    tcg_temp_free_i64(xbh);
2131    tcg_temp_free_i64(xbl);
2132}
2133
2134GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
2135
2136static void gen_xvxsigdp(DisasContext *ctx)
2137{
2138    TCGv_i64 xth;
2139    TCGv_i64 xtl;
2140    TCGv_i64 xbh;
2141    TCGv_i64 xbl;
2142    TCGv_i64 t0, zr, nan, exp;
2143
2144    if (unlikely(!ctx->vsx_enabled)) {
2145        gen_exception(ctx, POWERPC_EXCP_VSXU);
2146        return;
2147    }
2148    xth = tcg_temp_new_i64();
2149    xtl = tcg_temp_new_i64();
2150    xbh = tcg_temp_new_i64();
2151    xbl = tcg_temp_new_i64();
2152    get_cpu_vsr(xbh, xB(ctx->opcode), true);
2153    get_cpu_vsr(xbl, xB(ctx->opcode), false);
2154    exp = tcg_temp_new_i64();
2155    t0 = tcg_temp_new_i64();
2156    zr = tcg_const_i64(0);
2157    nan = tcg_const_i64(2047);
2158
2159    tcg_gen_extract_i64(exp, xbh, 52, 11);
2160    tcg_gen_movi_i64(t0, 0x0010000000000000);
2161    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2162    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2163    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
2164    set_cpu_vsr(xT(ctx->opcode), xth, true);
2165
2166    tcg_gen_extract_i64(exp, xbl, 52, 11);
2167    tcg_gen_movi_i64(t0, 0x0010000000000000);
2168    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2169    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2170    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2171    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2172
2173    tcg_temp_free_i64(t0);
2174    tcg_temp_free_i64(exp);
2175    tcg_temp_free_i64(zr);
2176    tcg_temp_free_i64(nan);
2177    tcg_temp_free_i64(xth);
2178    tcg_temp_free_i64(xtl);
2179    tcg_temp_free_i64(xbh);
2180    tcg_temp_free_i64(xbl);
2181}
2182
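/*
 * Common translation for the 16-byte (and 32-byte paired) VSX load/store
 * family.  The quadword is accessed as two 8-byte operations in
 * ascending address order, so in little-endian mode the low doubleword
 * pairs with the lower address (!ctx->le_mode selects the register half
 * for each access).  Paired accesses in LE mode also swap the order of
 * the two target registers.
 */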
2183static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
2184                     int rt, bool store, bool paired)
2185{
2186    TCGv ea;
2187    TCGv_i64 xt;
2188    MemOp mop;
2189    int rt1, rt2;
2190
2191    xt = tcg_temp_new_i64();
2192
2193    mop = DEF_MEMOP(MO_UQ);
2194
2195    gen_set_access_type(ctx, ACCESS_INT);
2196    ea = do_ea_calc(ctx, ra, displ);
2197
2198    if (paired && ctx->le_mode) {
2199        rt1 = rt + 1;
2200        rt2 = rt;
2201    } else {
2202        rt1 = rt;
2203        rt2 = rt + 1;
2204    }
2205
2206    if (store) {
2207        get_cpu_vsr(xt, rt1, !ctx->le_mode);
2208        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2209        gen_addr_add(ctx, ea, ea, 8);
2210        get_cpu_vsr(xt, rt1, ctx->le_mode);
2211        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2212        if (paired) {
2213            gen_addr_add(ctx, ea, ea, 8);
2214            get_cpu_vsr(xt, rt2, !ctx->le_mode);
2215            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2216            gen_addr_add(ctx, ea, ea, 8);
2217            get_cpu_vsr(xt, rt2, ctx->le_mode);
2218            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2219        }
2220    } else {
2221        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2222        set_cpu_vsr(rt1, xt, !ctx->le_mode);
2223        gen_addr_add(ctx, ea, ea, 8);
2224        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2225        set_cpu_vsr(rt1, xt, ctx->le_mode);
2226        if (paired) {
2227            gen_addr_add(ctx, ea, ea, 8);
2228            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2229            set_cpu_vsr(rt2, xt, !ctx->le_mode);
2230            gen_addr_add(ctx, ea, ea, 8);
2231            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2232            set_cpu_vsr(rt2, xt, ctx->le_mode);
2233        }
2234    }
2235
2236    tcg_temp_free(ea);
2237    tcg_temp_free_i64(xt);
2238    return true;
2239}
2240
2241static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
2242{
2243    if (paired || a->rt >= 32) {
2244        REQUIRE_VSX(ctx);
2245    } else {
2246        REQUIRE_VECTOR(ctx);
2247    }
2248
2249    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
2250}
2251
2252static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
2253                           bool store, bool paired)
2254{
2255    arg_D d;
2256    REQUIRE_VSX(ctx);
2257
2258    if (!resolve_PLS_D(ctx, &d, a)) {
2259        return true;
2260    }
2261
2262    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
2263}
2264
2265static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
2266{
2267    if (paired || a->rt >= 32) {
2268        REQUIRE_VSX(ctx);
2269    } else {
2270        REQUIRE_VECTOR(ctx);
2271    }
2272
2273    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
2274}
2275
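/*
 * LXSD/STXSD move a doubleword between storage and the high half of
 * VSR[RT+32].  Since VSRs 32-63 overlay the Altivec registers, both the
 * load and the store form are gated on MSR.VEC; a load additionally
 * clears the low doubleword of the target.
 */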
2276static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2277{
2278    TCGv ea;
2279    TCGv_i64 xt;
2280    MemOp mop;
2281
2282    REQUIRE_VECTOR(ctx);
2287
2288    xt = tcg_temp_new_i64();
2289    mop = DEF_MEMOP(MO_UQ);
2290
2291    gen_set_access_type(ctx, ACCESS_INT);
2292    ea = do_ea_calc(ctx, ra, displ);
2293
2294    if (store) {
2295        get_cpu_vsr(xt, rt + 32, true);
2296        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2297    } else {
2298        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2299        set_cpu_vsr(rt + 32, xt, true);
2300        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2301    }
2302
2303    tcg_temp_free(ea);
2304    tcg_temp_free_i64(xt);
2305
2306    return true;
2307}
2308
2309static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
2310{
2311    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2312}
2313
2314static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2315{
2316    arg_D d;
2317
2318    if (!resolve_PLS_D(ctx, &d, a)) {
2319        return true;
2320    }
2321
2322    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2323}
2324
2325static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2326{
2327    TCGv ea;
2328    TCGv_i64 xt;
2329
2330    REQUIRE_VECTOR(ctx);
2331
2332    xt = tcg_temp_new_i64();
2333
2334    gen_set_access_type(ctx, ACCESS_INT);
2335    ea = do_ea_calc(ctx, ra, displ);
2336
2337    if (store) {
2338        get_cpu_vsr(xt, rt + 32, true);
2339        gen_qemu_st32fs(ctx, xt, ea);
2340    } else {
2341        gen_qemu_ld32fs(ctx, xt, ea);
2342        set_cpu_vsr(rt + 32, xt, true);
2343        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2344    }
2345
2346    tcg_temp_free(ea);
2347    tcg_temp_free_i64(xt);
2348
2349    return true;
2350}
2351
2352static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
2353{
2354    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2355}
2356
2357static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2358{
2359    arg_D d;
2360
2361    if (!resolve_PLS_D(ctx, &d, a)) {
2362        return true;
2363    }
2364
2365    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2366}
2367
2368TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
2369TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
2370TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
2371TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
2372TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
2373TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
2374TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
2375TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
2376TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
2377TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
2378TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
2379TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
2380TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
2381TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
2382TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
2383TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
2384TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
2385TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
2386TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
2387TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
2388
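/*
 * Load/Store VSX Vector Rightmost Element: the element is transferred to
 * or from the low doubleword of the VSR, and a load zeroes the high
 * doubleword.
 */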
2389static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
2390{
2391    TCGv ea;
2392    TCGv_i64 xt;
2393
2394    REQUIRE_VSX(ctx);
2395
2396    xt = tcg_temp_new_i64();
2397
2398    gen_set_access_type(ctx, ACCESS_INT);
2399    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);
2400
2401    if (store) {
2402        get_cpu_vsr(xt, a->rt, false);
2403        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2404    } else {
2405        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2406        set_cpu_vsr(a->rt, xt, false);
2407        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
2408    }
2409
2410    tcg_temp_free(ea);
2411    tcg_temp_free_i64(xt);
2412    return true;
2413}
2414
2415TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
2416TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
2417TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
2418TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
2419TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
2420TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
2421TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
2422TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)
2423
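/*
 * XXEVAL: IMM is an 8-entry truth table indexed, in PowerISA (MSB 0) bit
 * order, by the concatenation a:b:c of the three source bits.  Each set
 * bit of IMM therefore contributes one conjunction (minterm) to the
 * result; e.g. IMM=0b00000001 selects only a&b&c, which is AND(A,B,C).
 */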
2424static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
2425                           int64_t imm)
2426{
2427    /*
2428     * Instead of processing imm bit-by-bit, we'll skip the computation of
2429     * conjunctions whose corresponding bit is unset.
2430     */
2431    int bit;
2432    TCGv_i64 conj, disj;
2433
2434    conj = tcg_temp_new_i64();
2435    disj = tcg_const_i64(0);
2436
2437    /* Iterate over set bits from the least to the most significant bit */
2438    while (imm) {
2439        /*
2440         * Get the next bit to be processed with ctz64. Invert the result of
2441         * ctz64 to match the indexing used by PowerISA.
2442         */
2443        bit = 7 - ctz64(imm);
2444        if (bit & 0x4) {
2445            tcg_gen_mov_i64(conj, a);
2446        } else {
2447            tcg_gen_not_i64(conj, a);
2448        }
2449        if (bit & 0x2) {
2450            tcg_gen_and_i64(conj, conj, b);
2451        } else {
2452            tcg_gen_andc_i64(conj, conj, b);
2453        }
2454        if (bit & 0x1) {
2455            tcg_gen_and_i64(conj, conj, c);
2456        } else {
2457            tcg_gen_andc_i64(conj, conj, c);
2458        }
2459        tcg_gen_or_i64(disj, disj, conj);
2460
2461        /* Unset the least significant bit that is set */
2462        imm &= imm - 1;
2463    }
2464
2465    tcg_gen_mov_i64(t, disj);
2466
2467    tcg_temp_free_i64(conj);
2468    tcg_temp_free_i64(disj);
2469}
2470
2471static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2472                           TCGv_vec c, int64_t imm)
2473{
2474    /*
2475     * Instead of processing imm bit-by-bit, we'll skip the computation of
2476     * conjunctions whose corresponding bit is unset.
2477     */
2478    int bit;
2479    TCGv_vec disj, conj;
2480
2481    disj = tcg_const_zeros_vec_matching(t);
2482    conj = tcg_temp_new_vec_matching(t);
2483
2484    /* Iterate over set bits from the least to the most significant bit */
2485    while (imm) {
2486        /*
2487         * Get the next bit to be processed with ctz64. Invert the result of
2488         * ctz64 to match the indexing used by PowerISA.
2489         */
2490        bit = 7 - ctz64(imm);
2491        if (bit & 0x4) {
2492            tcg_gen_mov_vec(conj, a);
2493        } else {
2494            tcg_gen_not_vec(vece, conj, a);
2495        }
2496        if (bit & 0x2) {
2497            tcg_gen_and_vec(vece, conj, conj, b);
2498        } else {
2499            tcg_gen_andc_vec(vece, conj, conj, b);
2500        }
2501        if (bit & 0x1) {
2502            tcg_gen_and_vec(vece, conj, conj, c);
2503        } else {
2504            tcg_gen_andc_vec(vece, conj, conj, c);
2505        }
2506        tcg_gen_or_vec(vece, disj, disj, conj);
2507
2508        /* Unset the least significant bit that is set */
2509        imm &= imm - 1;
2510    }
2511
2512    tcg_gen_mov_vec(t, disj);
2513
2514    tcg_temp_free_vec(disj);
2515    tcg_temp_free_vec(conj);
2516}
2517
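/*
 * Many IMM values degenerate to a single existing gvec operation, and
 * the switch below matches them by truth table.  E.g. and(B,A) is
 * 0b00000011 because only the minterms with a=b=1 (indices 6 and 7,
 * i.e. the two least significant IMM bits) are selected.
 */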
2518static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
2519{
2520    static const TCGOpcode vecop_list[] = {
2521        INDEX_op_andc_vec, 0
2522    };
2523    static const GVecGen4i op = {
2524        .fniv = gen_xxeval_vec,
2525        .fno = gen_helper_XXEVAL,
2526        .fni8 = gen_xxeval_i64,
2527        .opt_opc = vecop_list,
2528        .vece = MO_64
2529    };
2530    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
2531        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);
2532
2533    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2534    REQUIRE_VSX(ctx);
2535
2536    /* Equivalent functions that can be implemented with a single gen_gvec */
2537    switch (a->imm) {
2538    case 0b00000000: /* false */
2539        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
2540        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
2541        break;
2542    case 0b00000011: /* and(B,A) */
2543        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
2544        break;
2545    case 0b00000101: /* and(C,A) */
2546        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
2547        break;
2548    case 0b00001111: /* A */
2549        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
2550        break;
2551    case 0b00010001: /* and(C,B) */
2552        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
2553        break;
2554    case 0b00011011: /* C?B:A */
2555        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
2556        break;
2557    case 0b00011101: /* B?C:A */
2558        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
2559        break;
2560    case 0b00100111: /* C?A:B */
2561        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
2562        break;
2563    case 0b00110011: /* B */
2564        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
2565        break;
2566    case 0b00110101: /* A?C:B */
2567        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
2568        break;
2569    case 0b00111100: /* xor(B,A) */
2570        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
2571        break;
2572    case 0b00111111: /* or(B,A) */
2573        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
2574        break;
2575    case 0b01000111: /* B?A:C */
2576        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
2577        break;
2578    case 0b01010011: /* A?B:C */
2579        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
2580        break;
2581    case 0b01010101: /* C */
2582        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
2583        break;
2584    case 0b01011010: /* xor(C,A) */
2585        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
2586        break;
2587    case 0b01011111: /* or(C,A) */
2588        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
2589        break;
2590    case 0b01100110: /* xor(C,B) */
2591        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
2592        break;
2593    case 0b01110111: /* or(C,B) */
2594        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
2595        break;
2596    case 0b10001000: /* nor(C,B) */
2597        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
2598        break;
2599    case 0b10011001: /* eqv(C,B) */
2600        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
2601        break;
2602    case 0b10100000: /* nor(C,A) */
2603        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
2604        break;
2605    case 0b10100101: /* eqv(C,A) */
2606        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
2607        break;
2608    case 0b10101010: /* not(C) */
2609        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
2610        break;
2611    case 0b11000000: /* nor(B,A) */
2612        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
2613        break;
2614    case 0b11000011: /* eqv(B,A) */
2615        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
2616        break;
2617    case 0b11001100: /* not(B) */
2618        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
2619        break;
2620    case 0b11101110: /* nand(C,B) */
2621        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
2622        break;
2623    case 0b11110000: /* not(A) */
2624        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
2625        break;
2626    case 0b11111010: /* nand(C,A) */
2627        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
2628        break;
2629    case 0b11111100: /* nand(B,A) */
2630        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
2631        break;
2632    case 0b11111111: /* true */
2633        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
2634        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
2635        break;
2636    default:
2637        /* Fallback to compute all conjunctions/disjunctions */
2638        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
2639    }
2640
2641    return true;
2642}
2643
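/*
 * XXBLENDV: each element of the result is taken from XB when the most
 * significant bit of the corresponding element of XC is set, else from
 * XA.  The arithmetic shift replicates that sign bit across the element
 * to form the bitsel mask.
 */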
2644static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2645                             TCGv_vec c)
2646{
2647    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
2648    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
2649    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
2650    tcg_temp_free_vec(tmp);
2651}
2652
2653static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
2654{
2655    static const TCGOpcode vecop_list[] = {
2656        INDEX_op_sari_vec, 0
2657    };
2658    static const GVecGen4 ops[4] = {
2659        {
2660            .fniv = gen_xxblendv_vec,
2661            .fno = gen_helper_XXBLENDVB,
2662            .opt_opc = vecop_list,
2663            .vece = MO_8
2664        },
2665        {
2666            .fniv = gen_xxblendv_vec,
2667            .fno = gen_helper_XXBLENDVH,
2668            .opt_opc = vecop_list,
2669            .vece = MO_16
2670        },
2671        {
2672            .fniv = gen_xxblendv_vec,
2673            .fno = gen_helper_XXBLENDVW,
2674            .opt_opc = vecop_list,
2675            .vece = MO_32
2676        },
2677        {
2678            .fniv = gen_xxblendv_vec,
2679            .fno = gen_helper_XXBLENDVD,
2680            .opt_opc = vecop_list,
2681            .vece = MO_64
2682        }
2683    };
2684
2685    REQUIRE_VSX(ctx);
2686
2687    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
2688                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
2689                   16, 16, &ops[vece]);
2690
2691    return true;
2692}
2693
2694TRANS(XXBLENDVB, do_xxblendv, MO_8)
2695TRANS(XXBLENDVH, do_xxblendv, MO_16)
2696TRANS(XXBLENDVW, do_xxblendv, MO_32)
2697TRANS(XXBLENDVD, do_xxblendv, MO_64)
2698
2699static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
2700    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2701{
2702    TCGv_ptr xt, xa, xb;
2703
2704    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
2705    REQUIRE_VSX(ctx);
2706
2707    xt = gen_vsr_ptr(a->xt);
2708    xa = gen_vsr_ptr(a->xa);
2709    xb = gen_vsr_ptr(a->xb);
2710
2711    helper(cpu_env, xt, xa, xb);
2712
2713    tcg_temp_free_ptr(xt);
2714    tcg_temp_free_ptr(xa);
2715    tcg_temp_free_ptr(xb);
2716
2717    return true;
2718}
2719
2720TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
2721TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
2722TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
2723TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
2724TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
2725TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
2726TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)
2727
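/*
 * The quad-precision scalar operands live in VSRs 32-63, which overlay
 * the Altivec registers, hence gen_avr_ptr rather than gen_vsr_ptr here.
 */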
2728static bool do_helper_X(arg_X *a,
2729    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2730{
2731    TCGv_ptr rt, ra, rb;
2732
2733    rt = gen_avr_ptr(a->rt);
2734    ra = gen_avr_ptr(a->ra);
2735    rb = gen_avr_ptr(a->rb);
2736
2737    helper(cpu_env, rt, ra, rb);
2738
2739    tcg_temp_free_ptr(rt);
2740    tcg_temp_free_ptr(ra);
2741    tcg_temp_free_ptr(rb);
2742
2743    return true;
2744}
2745
2746static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
2747    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2748{
2749    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2750    REQUIRE_VSX(ctx);
2751
2752    return do_helper_X(a, helper);
2753}
2754
2755TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
2756TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
2757TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
2758TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
2759TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)
2760
2761static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
2762{
2763    TCGv_ptr xt, xb;
2764
2765    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2766    REQUIRE_VSX(ctx);
2767
2768    xt = gen_vsr_ptr(a->xt);
2769    xb = gen_vsr_ptr(a->xb);
2770
2771    gen_helper_XVCVSPBF16(cpu_env, xt, xb);
2772
2773    tcg_temp_free_ptr(xt);
2774    tcg_temp_free_ptr(xb);
2775
2776    return true;
2777}
2778
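/*
 * A bfloat16 value is the high 16 bits of an IEEE single-precision
 * image, so converting to single precision is just a 16-bit left shift
 * of each word element; no helper is required.
 */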
2779static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
2780{
2781    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2782    REQUIRE_VSX(ctx);
2783
2784    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
2785                      16, 16, 16);
2786
2787    return true;
2788}
2789
2790#undef GEN_XX2FORM
2791#undef GEN_XX3FORM
2792#undef GEN_XX2IFORM
2793#undef GEN_XX3_RC_FORM
2794#undef GEN_XX3FORM_DM
2795#undef VSX_LOGICAL
2796