/*
 * RISC-V translation routines for the RV64Zfh Standard Extension.
 *
 * Copyright (c) 2020 Chih-Min Chao, chihmin.chao@sifive.com
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2 or later, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

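/*
 * Availability checks: if the required extension is not enabled in the
 * current configuration, the trans_* function returns false and the
 * decoder treats the encoding as an illegal instruction.
 */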
#define REQUIRE_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zfh) {      \
        return false;         \
    }                         \
} while (0)

#define REQUIRE_ZHINX_OR_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
        return false;                  \
    }                                  \
} while (0)

#define REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx) do {   \
    if (!ctx->cfg_ptr->ext_zfhmin && !ctx->cfg_ptr->ext_zfbfmin) { \
        return false;                         \
    }                                         \
} while (0)

#define REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx) do {                 \
    if (!(ctx->cfg_ptr->ext_zfhmin || ctx->cfg_ptr->ext_zhinxmin)) { \
        return false;                                        \
    }                                                        \
} while (0)

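/*
 * flh/fsh transfer a 16-bit value through an f-register.  The loaded
 * half-precision value is NaN-boxed into the upper bits of the 64-bit
 * register, as the RISC-V spec requires for narrower values held in
 * wider FP registers.
 */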
static bool trans_flh(DisasContext *ctx, arg_flh *a)
{
    TCGv_i64 dest;
    TCGv t0;

    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    decode_save_opc(ctx);
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    if (a->imm) {
        TCGv temp = tcg_temp_new();
        tcg_gen_addi_tl(temp, t0, a->imm);
        t0 = temp;
    }

    dest = cpu_fpr[a->rd];
    tcg_gen_qemu_ld_i64(dest, t0, ctx->mem_idx, MO_TEUW);
    gen_nanbox_h(dest, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsh(DisasContext *ctx, arg_fsh *a)
{
    TCGv t0;

    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    decode_save_opc(ctx);
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    if (a->imm) {
        TCGv temp = tcg_temp_new();
        tcg_gen_addi_tl(temp, t0, a->imm);
        t0 = temp;
    }

    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], t0, ctx->mem_idx, MO_TEUW);

    return true;
}

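/*
 * The arithmetic translations below read operands with get_fpr_hs() and
 * write results with gen_set_fpr_hs(), so the same code serves both the
 * f-register form (Zfh) and the x-register form (Zhinx).
 */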
static bool trans_fmadd_h(DisasContext *ctx, arg_fmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmadd_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmsub_h(DisasContext *ctx, arg_fmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmsub_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmsub_h(DisasContext *ctx, arg_fnmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmsub_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmadd_h(DisasContext *ctx, arg_fnmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmadd_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fadd_h(DisasContext *ctx, arg_fadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fadd_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsub_h(DisasContext *ctx, arg_fsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsub_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmul_h(DisasContext *ctx, arg_fmul_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmul_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fdiv_h(DisasContext *ctx, arg_fdiv_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fdiv_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsqrt_h(DisasContext *ctx, arg_fsqrt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsqrt_h(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

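/*
 * Sign-injection (fsgnj/fsgnjn/fsgnjx) is open-coded with TCG ops rather
 * than a helper.  With f-registers the inputs are first checked for proper
 * NaN-boxing; with Zfinx/Zhinx the 16-bit result is sign-extended in the
 * x-register instead.
 */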
static bool trans_fsgnj_h(DisasContext *ctx, arg_fsgnj_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(dest, src1);
        } else {
            tcg_gen_ext16s_i64(dest, src1);
        }
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_h(rs1, src1);
            gen_check_nanbox_h(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'Zfh'. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 15);
        } else {
            tcg_gen_deposit_i64(dest, src2, src1, 0, 15);
            tcg_gen_ext16s_i64(dest, dest);
        }
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjn_h(DisasContext *ctx, arg_fsgnjn_h *a)
{
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FNEG */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Replace bit 15 in rs1 with the inverse of bit 15 in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_constant_i64(~MAKE_64BIT_MASK(15, 1));
        tcg_gen_not_i64(rs2, rs2);
        tcg_gen_andc_i64(rs2, rs2, mask);
        tcg_gen_and_i64(dest, mask, rs1);
        tcg_gen_or_i64(dest, dest, rs2);
    }
    /* Sign-extend the result instead of nanboxing it when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjx_h(DisasContext *ctx, arg_fsgnjx_h *a)
{
    TCGv_i64 rs1, rs2;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FABS */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Xor bit 15 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(15, 1));
        tcg_gen_xor_i64(dest, rs1, dest);
    }
    /* Sign-extend the result instead of nanboxing it when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmin_h(DisasContext *ctx, arg_fmin_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmin_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmax_h(DisasContext *ctx, arg_fmax_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmax_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

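/*
 * Conversions between half precision and single/double precision need only
 * the "min" subset (Zfhmin or Zhinxmin); the double-precision forms
 * additionally require D or Zdinx.
 */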
static bool trans_fcvt_s_h(DisasContext *ctx, arg_fcvt_s_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_h(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_d_h(DisasContext *ctx, arg_fcvt_d_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_d_h(dest, tcg_env, src1);
    gen_set_fpr_d(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_h_s(DisasContext *ctx, arg_fcvt_h_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_s(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_h_d(DisasContext *ctx, arg_fcvt_h_d *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_d(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_d(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}

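/*
 * feq/flt/fle and fclass write their result to an integer register; the
 * FP register file is not modified, so these translations do not call
 * mark_fs_dirty().
 */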
static bool trans_feq_h(DisasContext *ctx, arg_feq_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_feq_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_flt_h(DisasContext *ctx, arg_flt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_flt_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);

    return true;
}

static bool trans_fle_h(DisasContext *ctx, arg_fle_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fle_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fclass_h(DisasContext *ctx, arg_fclass_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_helper_fclass_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

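/*
 * Integer <-> half conversions go through softfloat helpers; gen_set_rm()
 * installs the instruction's rounding mode (static or DYN) before the call.
 */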
static bool trans_fcvt_w_h(DisasContext *ctx, arg_fcvt_w_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_w_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_wu_h(DisasContext *ctx, arg_fcvt_wu_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_wu_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_h_w(DisasContext *ctx, arg_fcvt_h_w *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_w(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_h_wu(DisasContext *ctx, arg_fcvt_h_wu *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_wu(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

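/*
 * fmv.x.h / fmv.h.x move the raw 16-bit encoding between an f-register and
 * an x-register: the GPR result is sign-extended from bit 15, and the value
 * written to the f-register is NaN-boxed.
 */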
static bool trans_fmv_x_h(DisasContext *ctx, arg_fmv_x_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);

#if defined(TARGET_RISCV64)
    /* 16 bits -> 64 bits */
    tcg_gen_ext16s_tl(dest, cpu_fpr[a->rs1]);
#else
    /* 16 bits -> 32 bits */
    tcg_gen_extrl_i64_i32(dest, cpu_fpr[a->rs1]);
    tcg_gen_ext16s_tl(dest, dest);
#endif

    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fmv_h_x(DisasContext *ctx, arg_fmv_h_x *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    TCGv t0 = get_gpr(ctx, a->rs1, EXT_ZERO);

    tcg_gen_extu_tl_i64(cpu_fpr[a->rd], t0);
    gen_nanbox_h(cpu_fpr[a->rd], cpu_fpr[a->rd]);

    mark_fs_dirty(ctx);
    return true;
}

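/*
 * The fcvt.l.h / fcvt.lu.h / fcvt.h.l / fcvt.h.lu forms operate on 64-bit
 * integers and are therefore gated on RV64 via REQUIRE_64BIT.
 */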
static bool trans_fcvt_l_h(DisasContext *ctx, arg_fcvt_l_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_l_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_lu_h(DisasContext *ctx, arg_fcvt_lu_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_lu_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_h_l(DisasContext *ctx, arg_fcvt_h_l *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_l(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_h_lu(DisasContext *ctx, arg_fcvt_h_lu *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_lu(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}