/*
 * RISC-V translation routines for the RV64Zfh Standard Extension.
 *
 * Copyright (c) 2020 Chih-Min Chao, chihmin.chao@sifive.com
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2 or later, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

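/*
 * Decode predicates: each trans_* function bails out, returning false so
 * the insn is treated as illegal, unless the required extension is enabled
 * in the CPU configuration.  Zfh/Zfhmin keep half-precision values in the
 * F registers, while Zhinx/Zhinxmin keep them in the integer registers.
 */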
#define REQUIRE_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zfh) {      \
        return false;         \
    }                         \
} while (0)

#define REQUIRE_ZHINX_OR_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
        return false;                  \
    }                                  \
} while (0)

#define REQUIRE_ZFH_OR_ZFHMIN(ctx) do {       \
    if (!(ctx->cfg_ptr->ext_zfh || ctx->cfg_ptr->ext_zfhmin)) { \
        return false;                         \
    }                                         \
} while (0)

#define REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx) do { \
    if (!(ctx->cfg_ptr->ext_zfh || ctx->cfg_ptr->ext_zfhmin ||          \
          ctx->cfg_ptr->ext_zhinx || ctx->cfg_ptr->ext_zhinxmin)) {     \
        return false;                                        \
    }                                                        \
} while (0)

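/*
 * flh loads 16 bits and NaN-boxes the value (upper bits set to all ones)
 * before writing the 64-bit F register; fsh stores the low 16 bits of the
 * source F register unchanged.
 */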
static bool trans_flh(DisasContext *ctx, arg_flh *a)
{
    TCGv_i64 dest;
    TCGv t0;

    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    decode_save_opc(ctx);
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    if (a->imm) {
        TCGv temp = temp_new(ctx);
        tcg_gen_addi_tl(temp, t0, a->imm);
        t0 = temp;
    }

    dest = cpu_fpr[a->rd];
    tcg_gen_qemu_ld_i64(dest, t0, ctx->mem_idx, MO_TEUW);
    gen_nanbox_h(dest, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsh(DisasContext *ctx, arg_fsh *a)
{
    TCGv t0;

    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    decode_save_opc(ctx);
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    if (a->imm) {
        TCGv temp = temp_new(ctx);
        tcg_gen_addi_tl(temp, t0, a->imm);
        t0 = temp;
    }

    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], t0, ctx->mem_idx, MO_TEUW);

    return true;
}

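/*
 * The arithmetic ops below share one pattern: dest_fpr()/get_fpr_hs() hide
 * whether operands live in F registers (Zfh) or integer registers (Zhinx),
 * gen_set_rm() selects the static or dynamic rounding mode, the softfloat
 * helper performs the computation, and gen_set_fpr_hs()/mark_fs_dirty()
 * write the result back.
 */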
static bool trans_fmadd_h(DisasContext *ctx, arg_fmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmadd_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmsub_h(DisasContext *ctx, arg_fmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmsub_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmsub_h(DisasContext *ctx, arg_fnmsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmsub_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmadd_h(DisasContext *ctx, arg_fnmadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmadd_h(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fadd_h(DisasContext *ctx, arg_fadd_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fadd_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsub_h(DisasContext *ctx, arg_fsub_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsub_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmul_h(DisasContext *ctx, arg_fmul_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmul_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fdiv_h(DisasContext *ctx, arg_fdiv_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fdiv_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsqrt_h(DisasContext *ctx, arg_fsqrt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsqrt_h(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

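/*
 * Sign-injection (fsgnj/fsgnjn/fsgnjx) operates on bit 15, the sign bit of
 * the half-precision value.  With real F registers the inputs are first
 * checked for proper NaN-boxing; with Zfinx/Zhinx the 16-bit result is
 * sign-extended into the destination register instead.
 */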
static bool trans_fsgnj_h(DisasContext *ctx, arg_fsgnj_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(dest, src1);
        } else {
            tcg_gen_ext16s_i64(dest, src1);
        }
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_h(rs1, src1);
            gen_check_nanbox_h(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'Zfh'. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 15);

            tcg_temp_free_i64(rs1);
            tcg_temp_free_i64(rs2);
        } else {
            tcg_gen_deposit_i64(dest, src2, src1, 0, 15);
            tcg_gen_ext16s_i64(dest, dest);
        }
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjn_h(DisasContext *ctx, arg_fsgnjn_h *a)
{
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FNEG */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Replace bit 15 in rs1 with the inverse of bit 15 in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_const_i64(~MAKE_64BIT_MASK(15, 1));
        tcg_gen_not_i64(rs2, rs2);
        tcg_gen_andc_i64(rs2, rs2, mask);
        tcg_gen_and_i64(dest, mask, rs1);
        tcg_gen_or_i64(dest, dest, rs2);

        tcg_temp_free_i64(mask);
        tcg_temp_free_i64(rs2);
    }
    /* Sign-extend instead of NaN-boxing the result when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjx_h(DisasContext *ctx, arg_fsgnjx_h *a)
{
    TCGv_i64 rs1, rs2;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FABS */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Xor bit 15 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(15, 1));
        tcg_gen_xor_i64(dest, rs1, dest);

        tcg_temp_free_i64(rs2);
    }
    /* Sign-extend instead of NaN-boxing the result when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmin_h(DisasContext *ctx, arg_fmin_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmin_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmax_h(DisasContext *ctx, arg_fmax_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmax_h(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

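/*
 * Conversions between half and single/double precision are part of the
 * minimal Zfhmin/Zhinxmin subsets (alongside load/store and fmv), hence
 * the wider REQUIRE_* predicate used here.
 */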
static bool trans_fcvt_s_h(DisasContext *ctx, arg_fcvt_s_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_h(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_d_h(DisasContext *ctx, arg_fcvt_d_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_d_h(dest, cpu_env, src1);
    gen_set_fpr_d(ctx, a->rd, dest);

    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_h_s(DisasContext *ctx, arg_fcvt_h_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_s(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}

static bool trans_fcvt_h_d(DisasContext *ctx, arg_fcvt_h_d *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN_OR_ZHINX_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_d(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_d(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);

    return true;
}

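/*
 * Comparisons and classification produce an integer result in a GPR and
 * never modify the FP register file, so they do not mark FS dirty.
 */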
static bool trans_feq_h(DisasContext *ctx, arg_feq_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_feq_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_flt_h(DisasContext *ctx, arg_flt_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_flt_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);

    return true;
}

static bool trans_fle_h(DisasContext *ctx, arg_fle_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fle_h(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fclass_h(DisasContext *ctx, arg_fclass_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_helper_fclass_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_w_h(DisasContext *ctx, arg_fcvt_w_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_w_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_wu_h(DisasContext *ctx, arg_fcvt_wu_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_wu_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_h_w(DisasContext *ctx, arg_fcvt_h_w *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_w(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_h_wu(DisasContext *ctx, arg_fcvt_h_wu *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_wu(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

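/*
 * fmv.x.h moves the raw 16-bit encoding to a GPR, sign-extending it to
 * XLEN; fmv.h.x moves it back and NaN-boxes it.  Both access cpu_fpr[]
 * directly, so they exist only for Zfh/Zfhmin; with Zhinx there is nothing
 * to move, as the value already lives in an integer register.
 */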
static bool trans_fmv_x_h(DisasContext *ctx, arg_fmv_x_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);

#if defined(TARGET_RISCV64)
    /* 16 bits -> 64 bits */
    tcg_gen_ext16s_tl(dest, cpu_fpr[a->rs1]);
#else
    /* 16 bits -> 32 bits */
    tcg_gen_extrl_i64_i32(dest, cpu_fpr[a->rs1]);
    tcg_gen_ext16s_tl(dest, dest);
#endif

    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fmv_h_x(DisasContext *ctx, arg_fmv_h_x *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFH_OR_ZFHMIN(ctx);

    TCGv t0 = get_gpr(ctx, a->rs1, EXT_ZERO);

    tcg_gen_extu_tl_i64(cpu_fpr[a->rd], t0);
    gen_nanbox_h(cpu_fpr[a->rd], cpu_fpr[a->rd]);

    mark_fs_dirty(ctx);
    return true;
}

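/* The 64-bit integer <-> half conversions are RV64-only. */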
static bool trans_fcvt_l_h(DisasContext *ctx, arg_fcvt_l_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_l_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_lu_h(DisasContext *ctx, arg_fcvt_lu_h *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_lu_h(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_h_l(DisasContext *ctx, arg_fcvt_h_l *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_l(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_h_lu(DisasContext *ctx, arg_fcvt_h_lu *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_lu(dest, cpu_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);

    mark_fs_dirty(ctx);
    return true;
}