/*
 * RISC-V translation routines for the RV64F Standard Extension.
 *
 * Copyright (c) 2016-2017 Sagar Karandikar, sagark@eecs.berkeley.edu
 * Copyright (c) 2018 Peer Adelt, peer.adelt@hni.uni-paderborn.de
 *                    Bastian Koppelmann, kbastian@mail.uni-paderborn.de
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2 or later, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

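/*
 * Refuse to translate when the FP unit is disabled in mstatus.FS.  With
 * Zfinx enabled while running in virtualized mode, the failure is flagged
 * as a virtual instruction exception instead of a plain illegal instruction.
 */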
#define REQUIRE_FPU do {\
    if (ctx->mstatus_fs == EXT_STATUS_DISABLED) {                           \
        ctx->virt_inst_excp = ctx->virt_enabled && ctx->cfg_ptr->ext_zfinx; \
        return false;                                                       \
    }                                                                       \
} while (0)

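/* Accept the instruction if either Zfinx is enabled or the F extension is present. */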
#define REQUIRE_ZFINX_OR_F(ctx) do {\
    if (!ctx->cfg_ptr->ext_zfinx) { \
        REQUIRE_EXT(ctx, RVF); \
    } \
} while (0)

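/* Compressed FP loads/stores need either Zcf or both the F and C extensions. */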
#define REQUIRE_ZCF_OR_FC(ctx) do {                     \
    if (!ctx->cfg_ptr->ext_zcf) {                       \
        if (!has_ext(ctx, RVF) || !has_ext(ctx, RVC)) { \
            return false;                               \
        }                                               \
    }                                                   \
} while (0)

static bool trans_flw(DisasContext *ctx, arg_flw *a)
{
    TCGv_i64 dest;
    TCGv addr;

    REQUIRE_FPU;
    REQUIRE_EXT(ctx, RVF);

    decode_save_opc(ctx);
    addr = get_address(ctx, a->rs1, a->imm);
    dest = cpu_fpr[a->rd];
    tcg_gen_qemu_ld_i64(dest, addr, ctx->mem_idx, MO_TEUL);
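    /* RISC-V requires NaN-boxing of narrower width floating point values. */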
    gen_nanbox_s(dest, dest);

    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsw(DisasContext *ctx, arg_fsw *a)
{
    TCGv addr;

    REQUIRE_FPU;
    REQUIRE_EXT(ctx, RVF);

    decode_save_opc(ctx);
    addr = get_address(ctx, a->rs1, a->imm);
    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], addr, ctx->mem_idx, MO_TEUL);
    return true;
}

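/* C.FLW/C.FSW expand to the standard FLW/FSW once the Zcf (or F+C) check passes. */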
static bool trans_c_flw(DisasContext *ctx, arg_flw *a)
{
    REQUIRE_ZCF_OR_FC(ctx);
    return trans_flw(ctx, a);
}

static bool trans_c_fsw(DisasContext *ctx, arg_fsw *a)
{
    REQUIRE_ZCF_OR_FC(ctx);
    return trans_fsw(ctx, a);
}

static bool trans_fmadd_s(DisasContext *ctx, arg_fmadd_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmadd_s(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmsub_s(DisasContext *ctx, arg_fmsub_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmsub_s(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmsub_s(DisasContext *ctx, arg_fnmsub_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmsub_s(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fnmadd_s(DisasContext *ctx, arg_fnmadd_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmadd_s(dest, cpu_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fadd_s(DisasContext *ctx, arg_fadd_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fadd_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsub_s(DisasContext *ctx, arg_fsub_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsub_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmul_s(DisasContext *ctx, arg_fmul_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmul_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fdiv_s(DisasContext *ctx, arg_fdiv_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fdiv_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsqrt_s(DisasContext *ctx, arg_fsqrt_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsqrt_s(dest, cpu_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnj_s(DisasContext *ctx, arg_fsgnj_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
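        /* With 'F', normalize the nanbox; with Zfinx, sign-extend the 32-bit value. */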
        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_s(dest, src1);
        } else {
            tcg_gen_ext32s_i64(dest, src1);
        }
    } else { /* FSGNJ */
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_s(rs1, src1);
            gen_check_nanbox_s(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'F'. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 31);
        } else {
            tcg_gen_deposit_i64(dest, src2, src1, 0, 31);
            tcg_gen_ext32s_i64(dest, dest);
        }
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjn_s(DisasContext *ctx, arg_fsgnjn_s *a)
{
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_s(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }
    if (a->rs1 == a->rs2) { /* FNEG */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(31, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();
        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_s(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Replace bit 31 in rs1 with inverse in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_constant_i64(~MAKE_64BIT_MASK(31, 1));
        tcg_gen_nor_i64(rs2, rs2, mask);
        tcg_gen_and_i64(dest, mask, rs1);
        tcg_gen_or_i64(dest, dest, rs2);
    }
    /* Sign-extend instead of NaN-boxing the result when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext32s_i64(dest, dest);
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fsgnjx_s(DisasContext *ctx, arg_fsgnjx_s *a)
{
    TCGv_i64 rs1, rs2;

    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    rs1 = tcg_temp_new_i64();

    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_s(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FABS */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(31, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_s(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Xor bit 31 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(31, 1));
        tcg_gen_xor_i64(dest, rs1, dest);
    }
    /* Sign-extend instead of NaN-boxing the result when Zfinx is enabled. */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext32s_i64(dest, dest);
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmin_s(DisasContext *ctx, arg_fmin_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

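    /* FMIN.S has no rounding-mode field, so there is no gen_set_rm() here. */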
    gen_helper_fmin_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmax_s(DisasContext *ctx, arg_fmax_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmax_s(dest, cpu_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_w_s(DisasContext *ctx, arg_fcvt_w_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_w_s(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
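    /* No FP register is written, so there is no mark_fs_dirty() here. */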
    return true;
}

static bool trans_fcvt_wu_s(DisasContext *ctx, arg_fcvt_wu_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_wu_s(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fmv_x_w(DisasContext *ctx, arg_fmv_x_w *a)
{
    /* NOTE: This was FMV.X.S in an earlier version of the ISA spec! */
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
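    /* RV64 sign-extends the 32-bit value into rd; RV32 just takes the low word. */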
#if defined(TARGET_RISCV64)
    tcg_gen_ext32s_tl(dest, src1);
#else
    tcg_gen_extrl_i64_i32(dest, src1);
#endif

    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_feq_s(DisasContext *ctx, arg_feq_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_feq_s(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_flt_s(DisasContext *ctx, arg_flt_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_flt_s(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fle_s(DisasContext *ctx, arg_fle_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fle_s(dest, cpu_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fclass_s(DisasContext *ctx, arg_fclass_s *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_helper_fclass_s(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_s_w(DisasContext *ctx, arg_fcvt_s_w *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv src = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_w(dest, cpu_env, src);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_s_wu(DisasContext *ctx, arg_fcvt_s_wu *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv src = get_gpr(ctx, a->rs1, EXT_ZERO);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_wu(dest, cpu_env, src);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fmv_w_x(DisasContext *ctx, arg_fmv_w_x *a)
{
    /* NOTE: This was FMV.S.X in an earlier version of the ISA spec! */
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv src = get_gpr(ctx, a->rs1, EXT_ZERO);

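    /* Zero-extend the GPR value to 64 bits and NaN-box it into the FP register. */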
    tcg_gen_extu_tl_i64(dest, src);
    gen_nanbox_s(dest, dest);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_l_s(DisasContext *ctx, arg_fcvt_l_s *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_l_s(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_lu_s(DisasContext *ctx, arg_fcvt_lu_s *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_lu_s(dest, cpu_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
    return true;
}

static bool trans_fcvt_s_l(DisasContext *ctx, arg_fcvt_s_l *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv src = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_l(dest, cpu_env, src);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}

static bool trans_fcvt_s_lu(DisasContext *ctx, arg_fcvt_s_lu *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_FPU;
    REQUIRE_ZFINX_OR_F(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv src = get_gpr(ctx, a->rs1, EXT_ZERO);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_lu(dest, cpu_env, src);
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}