1/*
2 * RISC-V translation routines for the RV64Zfh Standard Extension.
3 *
4 * Copyright (c) 2020 Chih-Min Chao, chihmin.chao@sifive.com
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms and conditions of the GNU General Public License,
8 * version 2 or later, as published by the Free Software Foundation.
9 *
10 * This program is distributed in the hope it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program.  If not, see <http://www.gnu.org/licenses/>.
17 */
18
/* Reject the instruction (decode returns false) unless Zfh is enabled. */
#define REQUIRE_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zfh) {      \
        return false;         \
    }                         \
} while (0)
24
/* Reject the instruction unless at least one of Zhinx or Zfh is enabled. */
#define REQUIRE_ZHINX_OR_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
        return false;                  \
    }                                  \
} while (0)
30
/* Reject the instruction unless at least one of Zfh or Zfhmin is enabled. */
#define REQUIRE_ZFH_OR_ZFHMIN(ctx) do {       \
    if (!(ctx->cfg_ptr->ext_zfh || ctx->cfg_ptr->ext_zfhmin)) { \
        return false;                         \
    }                                         \
} while (0)
36
/*
 * Reject the instruction unless one of Zfh, Zfhmin, Zhinx or Zhinxmin is
 * enabled (used by the conversion insns shared by all four extensions).
 */
#define REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx) do { \
    if (!(ctx->cfg_ptr->ext_zfh || ctx->cfg_ptr->ext_zfhmin ||          \
          ctx->cfg_ptr->ext_zhinx || ctx->cfg_ptr->ext_zhinxmin)) {     \
        return false;                                        \
    }                                                        \
} while (0)
43
/*
 * FLH: load a 16-bit halfword from memory at rs1 + imm into FP register rd,
 * NaN-boxing the result into the upper bits.
 */
static bool trans_flh(DisasContext *ctx, arg_flh *a)
{
    TCGv_i64 dest;
    TCGv t0;

    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    if (a->imm) {
        /* Compute the effective address in a translator-managed temp. */
        TCGv temp = temp_new(ctx);
        tcg_gen_addi_tl(temp, t0, a->imm);
        t0 = temp;
    }

    /* 16-bit load (MO_TEUW), then box into the canonical NaN pattern. */
    dest = cpu_fpr[a->rd];
    tcg_gen_qemu_ld_i64(dest, t0, ctx->mem_idx, MO_TEUW);
    gen_nanbox_h(dest, dest);

    mark_fs_dirty(ctx);
    return true;
}
66
67static bool trans_fsh(DisasContext *ctx, arg_fsh *a)
68{
69    TCGv t0;
70
71    REQUIRE_FPU;
72    REQUIRE_ZFH_OR_ZFHMIN(ctx);
73
74    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
75    if (a->imm) {
76        TCGv temp = tcg_temp_new();
77        tcg_gen_addi_tl(temp, t0, a->imm);
78        t0 = temp;
79    }
80
81    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], t0, ctx->mem_idx, MO_TEUW);
82
83    return true;
84}
85
/* FMADD.H: fused multiply-add, rd = (rs1 * rs2) + rs3. */
static bool trans_fmadd_h(DisasContext *ctx, arg_fmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fmadd_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
102
/* FMSUB.H: fused multiply-subtract, rd = (rs1 * rs2) - rs3. */
static bool trans_fmsub_h(DisasContext *ctx, arg_fmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fmsub_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
119
/* FNMSUB.H: negated fused multiply-subtract, rd = -(rs1 * rs2) + rs3. */
static bool trans_fnmsub_h(DisasContext *ctx, arg_fnmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fnmsub_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
136
/* FNMADD.H: negated fused multiply-add, rd = -(rs1 * rs2) - rs3. */
static bool trans_fnmadd_h(DisasContext *ctx, arg_fnmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fnmadd_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
153
/* FADD.H: half-precision addition, rd = rs1 + rs2. */
static bool trans_fadd_h(DisasContext *ctx, arg_fadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fadd_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
169
/* FSUB.H: half-precision subtraction, rd = rs1 - rs2. */
static bool trans_fsub_h(DisasContext *ctx, arg_fsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fsub_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
185
/* FMUL.H: half-precision multiplication, rd = rs1 * rs2. */
static bool trans_fmul_h(DisasContext *ctx, arg_fmul_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fmul_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
201
/* FDIV.H: half-precision division, rd = rs1 / rs2. */
static bool trans_fdiv_h(DisasContext *ctx, arg_fdiv_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fdiv_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
217
/* FSQRT.H: half-precision square root, rd = sqrt(rs1). */
static bool trans_fsqrt_h(DisasContext *ctx, arg_fsqrt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    /* Apply the instruction's rounding mode before the helper runs. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fsqrt_h(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
232
/*
 * FSGNJ.H: rd takes the magnitude (bits 14:0) of rs1 and the sign (bit 15)
 * of rs2.  rs1 == rs2 is the canonical FMV.H encoding and is special-cased.
 */
static bool trans_fsgnj_h(DisasContext *ctx, arg_fsgnj_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
        if (!ctx->cfg_ptr->ext_zfinx) {
            /* Canonicalize an improperly nanboxed source to default NaN. */
            gen_check_nanbox_h(dest, src1);
        } else {
            /* Zfinx keeps halves sign-extended in GPRs, not nanboxed. */
            tcg_gen_ext16s_i64(dest, src1);
        }
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_h(rs1, src1);
            gen_check_nanbox_h(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'Zfh'. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 15);

            tcg_temp_free_i64(rs1);
            tcg_temp_free_i64(rs2);
        } else {
            /* Merge rs1[14:0] under rs2's sign, then sign-extend for Zfinx. */
            tcg_gen_deposit_i64(dest, src2, src1, 0, 15);
            tcg_gen_ext16s_i64(dest, dest);
        }
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
270
/*
 * FSGNJN.H: rd takes the magnitude of rs1 and the INVERTED sign of rs2.
 * rs1 == rs2 is the canonical FNEG.H encoding.
 */
static bool trans_fsgnjn_h(DisasContext *ctx, arg_fsgnjn_h *a)
{
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        /* Canonicalize an improperly nanboxed source to default NaN. */
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FNEG */
        /* Just flip the sign bit (bit 15). */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Replace bit 15 in rs1 with inverse in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_const_i64(~MAKE_64BIT_MASK(15, 1));
        tcg_gen_not_i64(rs2, rs2);
        tcg_gen_andc_i64(rs2, rs2, mask);       /* rs2 = ~rs2 & bit15 */
        tcg_gen_and_i64(dest, mask, rs1);       /* dest = rs1 with bit15 clear */
        tcg_gen_or_i64(dest, dest, rs2);

        tcg_temp_free_i64(mask);
        tcg_temp_free_i64(rs2);
    }
    /* sign-extend instead of nanboxing the result when Zfinx is enabled */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}
321
/*
 * FSGNJX.H: rd takes the magnitude of rs1 and the XOR of the signs of
 * rs1 and rs2.  rs1 == rs2 is the canonical FABS.H encoding.
 */
static bool trans_fsgnjx_h(DisasContext *ctx, arg_fsgnjx_h *a)
{
    TCGv_i64 rs1, rs2;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        /* Canonicalize an improperly nanboxed source to default NaN. */
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FABS */
        /* Just clear the sign bit (bit 15). */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Xor bit 15 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(15, 1));
        tcg_gen_xor_i64(dest, rs1, dest);

        tcg_temp_free_i64(rs2);
    }
    /* sign-extend instead of nanboxing the result when Zfinx is enabled */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}
368
/* FMIN.H: rd = minimum of rs1, rs2 (no gen_set_rm: min does not round). */
static bool trans_fmin_h(DisasContext *ctx, arg_fmin_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmin_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
383
/* FMAX.H: rd = maximum of rs1, rs2 (no gen_set_rm: max does not round). */
static bool trans_fmax_h(DisasContext *ctx, arg_fmax_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmax_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
398
/* FCVT.S.H: widen half-precision rs1 to single precision. */
static bool trans_fcvt_s_h(DisasContext *ctx, arg_fcvt_s_h *a)
{
    REQUIRE_FPU;
    /* Conversions are available in the *min subset extensions as well. */
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_h(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}
415
/* FCVT.D.H: widen half-precision rs1 to double precision (needs D/Zdinx). */
static bool trans_fcvt_d_h(DisasContext *ctx, arg_fcvt_d_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_d_h(dest, cpu_env, src1);
    /* Result is a double, so write back with the double variant. */
    gen_set_fpr_d(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}
433
/* FCVT.H.S: narrow single-precision rs1 to half precision. */
static bool trans_fcvt_h_s(DisasContext *ctx, arg_fcvt_h_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_s(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}
449
/* FCVT.H.D: narrow double-precision rs1 to half precision (needs D/Zdinx). */
static bool trans_fcvt_h_d(DisasContext *ctx, arg_fcvt_h_d *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    /* Source is a double, so read it with the double variant. */
    TCGv_i64 src1 = get_fpr_d(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_d(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}
466
/* FEQ.H: quiet equality compare; boolean result goes to integer rd. */
static bool trans_feq_h(DisasContext *ctx, arg_feq_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* No gen_set_rm: comparisons do not round. */
    gen_helper_feq_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
480
/* FLT.H: signaling less-than compare; boolean result goes to integer rd. */
static bool trans_flt_h(DisasContext *ctx, arg_flt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* No gen_set_rm: comparisons do not round. */
    gen_helper_flt_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);

    return true;
}
495
/* FLE.H: signaling less-or-equal compare; boolean result goes to integer rd. */
static bool trans_fle_h(DisasContext *ctx, arg_fle_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* No gen_set_rm: comparisons do not round. */
    gen_helper_fle_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
509
/* FCLASS.H: classify rs1; class bitmask is written to integer rd. */
static bool trans_fclass_h(DisasContext *ctx, arg_fclass_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_helper_fclass_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
522
/* FCVT.W.H: convert half-precision rs1 to a signed 32-bit integer in rd. */
static bool trans_fcvt_w_h(DisasContext *ctx, arg_fcvt_w_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_w_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
536
/* FCVT.WU.H: convert half-precision rs1 to an unsigned 32-bit integer in rd. */
static bool trans_fcvt_wu_h(DisasContext *ctx, arg_fcvt_wu_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_wu_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
550
/* FCVT.H.W: convert signed 32-bit integer rs1 to half precision in rd. */
static bool trans_fcvt_h_w(DisasContext *ctx, arg_fcvt_h_w *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_w(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}
566
/* FCVT.H.WU: convert unsigned 32-bit integer rs1 to half precision in rd. */
static bool trans_fcvt_h_wu(DisasContext *ctx, arg_fcvt_h_wu *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    /*
     * NOTE(review): EXT_SIGN despite the unsigned source — presumably the
     * helper only consumes the low 32 bits, making the extension choice
     * immaterial; confirm against the fcvt_h_wu helper.
     */
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_wu(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}
582
/*
 * FMV.X.H: move the raw 16-bit pattern of FP rs1 to integer rd,
 * sign-extended from bit 15.
 */
static bool trans_fmv_x_h(DisasContext *ctx, arg_fmv_x_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);

#if defined(TARGET_RISCV64)
    /* 16 bits -> 64 bits */
    tcg_gen_ext16s_tl(dest, cpu_fpr[a->rs1]);
#else
    /* 16 bits -> 32 bits: narrow the 64-bit FP reg first, then sign-extend. */
    tcg_gen_extrl_i64_i32(dest, cpu_fpr[a->rs1]);
    tcg_gen_ext16s_tl(dest, dest);
#endif

    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
602
/*
 * FMV.H.X: move the low 16 bits of integer rs1 into FP rd,
 * NaN-boxing the upper bits.
 */
static bool trans_fmv_h_x(DisasContext *ctx, arg_fmv_h_x *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    TCGv t0 = get_gpr(ctx, a->rs1, EXT_ZERO);

    tcg_gen_extu_tl_i64(cpu_fpr[a->rd], t0);
    gen_nanbox_h(cpu_fpr[a->rd], cpu_fpr[a->rd]);

    mark_fs_dirty(ctx);
    return true;
}
616
/* FCVT.L.H (RV64 only): convert half-precision rs1 to a signed 64-bit int. */
static bool trans_fcvt_l_h(DisasContext *ctx, arg_fcvt_l_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_l_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
631
/* FCVT.LU.H (RV64 only): convert half-precision rs1 to an unsigned 64-bit int. */
static bool trans_fcvt_lu_h(DisasContext *ctx, arg_fcvt_lu_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_lu_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
646
/* FCVT.H.L (RV64 only): convert signed 64-bit integer rs1 to half precision. */
static bool trans_fcvt_h_l(DisasContext *ctx, arg_fcvt_h_l *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_l(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}
663
/*
 * FCVT.H.LU (RV64 only): convert unsigned 64-bit integer rs1 to half
 * precision.  On RV64 EXT_SIGN is a no-op for a full-width value, so the
 * helper sees the unmodified 64-bit register.
 */
static bool trans_fcvt_h_lu(DisasContext *ctx, arg_fcvt_h_lu *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_lu(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}
680