1/*
2 * RISC-V translation routines for the Zk[nd,ne,nh,sed,sh] Standard Extension.
3 *
4 * Copyright (c) 2021 Ruibo Lu, luruibo2000@163.com
5 * Copyright (c) 2021 Zewen Ye, lustrew@foxmail.com
6 *
7 * This program is free software; you can redistribute it and/or modify it
8 * under the terms and conditions of the GNU General Public License,
9 * version 2 or later, as published by the Free Software Foundation.
10 *
11 * This program is distributed in the hope it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
14 * more details.
15 *
16 * You should have received a copy of the GNU General Public License along with
17 * this program.  If not, see <http://www.gnu.org/licenses/>.
18 */
19
/*
 * Decode-time guards: each REQUIRE_* macro returns false from the
 * enclosing trans_* function (rejecting the instruction as illegal)
 * when the corresponding scalar-crypto extension is disabled in the
 * current CPU configuration.
 */

/* Zknd: NIST AES decryption instructions. */
#define REQUIRE_ZKND(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zknd) {              \
        return false;                           \
    }                                           \
} while (0)

/* Zkne: NIST AES encryption instructions. */
#define REQUIRE_ZKNE(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zkne) {              \
        return false;                           \
    }                                           \
} while (0)

/* Zknh: NIST SHA-256/SHA-512 hash instructions. */
#define REQUIRE_ZKNH(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zknh) {              \
        return false;                           \
    }                                           \
} while (0)

/* Zksed: ShangMi SM4 block-cipher instructions. */
#define REQUIRE_ZKSED(ctx) do {                 \
    if (!ctx->cfg_ptr->ext_zksed) {             \
        return false;                           \
    }                                           \
} while (0)

/* Zksh: ShangMi SM3 hash instructions. */
#define REQUIRE_ZKSH(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zksh) {              \
        return false;                           \
    }                                           \
} while (0)
49
50static bool gen_aes32_sm4(DisasContext *ctx, arg_k_aes *a,
51                          void (*func)(TCGv, TCGv, TCGv, TCGv))
52{
53    TCGv shamt = tcg_constant_tl(a->shamt);
54    TCGv dest = dest_gpr(ctx, a->rd);
55    TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE);
56    TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE);
57
58    func(dest, src1, src2, shamt);
59    gen_set_gpr(ctx, a->rd, dest);
60    return true;
61}
62
/* aes32esmi: AES middle-round encrypt on one selected byte (RV32, Zkne). */
static bool trans_aes32esmi(DisasContext *ctx, arg_aes32esmi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32esmi);
}

/* aes32esi: AES final-round encrypt on one selected byte (RV32, Zkne). */
static bool trans_aes32esi(DisasContext *ctx, arg_aes32esi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32esi);
}

/* aes32dsmi: AES middle-round decrypt on one selected byte (RV32, Zknd). */
static bool trans_aes32dsmi(DisasContext *ctx, arg_aes32dsmi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32dsmi);
}

/* aes32dsi: AES final-round decrypt on one selected byte (RV32, Zknd). */
static bool trans_aes32dsi(DisasContext *ctx, arg_aes32dsi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32dsi);
}
90
/*
 * The 64-bit AES instructions operate on a full register; each is a
 * plain two-source arithmetic op implemented entirely in a helper.
 */

/* aes64es: AES final-round encrypt (RV64, Zkne). */
static bool trans_aes64es(DisasContext *ctx, arg_aes64es *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64es, NULL);
}

/* aes64esm: AES middle-round encrypt (RV64, Zkne). */
static bool trans_aes64esm(DisasContext *ctx, arg_aes64esm *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64esm, NULL);
}

/* aes64ds: AES final-round decrypt (RV64, Zknd). */
static bool trans_aes64ds(DisasContext *ctx, arg_aes64ds *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64ds, NULL);
}

/* aes64dsm: AES middle-round decrypt (RV64, Zknd). */
static bool trans_aes64dsm(DisasContext *ctx, arg_aes64dsm *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64dsm, NULL);
}
118
/*
 * aes64ks2: AES key-schedule helper; usable with either the encrypt
 * (Zkne) or decrypt (Zknd) extension present.
 */
static bool trans_aes64ks2(DisasContext *ctx, arg_aes64ks2 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_EITHER_EXT(ctx, zknd, zkne);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64ks2, NULL);
}

/*
 * aes64ks1i: AES key-schedule with round-constant immediate.  The
 * encoding reserves immediates above 0xA; reject them as illegal.
 */
static bool trans_aes64ks1i(DisasContext *ctx, arg_aes64ks1i *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_EITHER_EXT(ctx, zknd, zkne);

    /* Round-constant index must be in [0, 0xA]. */
    if (a->imm > 0xA) {
        return false;
    }

    return gen_arith_imm_tl(ctx, a, EXT_NONE, gen_helper_aes64ks1i, NULL);
}

/* aes64im: AES inverse MixColumns, single-source (RV64, Zknd). */
static bool trans_aes64im(DisasContext *ctx, arg_aes64im *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_unary(ctx, a, EXT_NONE, gen_helper_aes64im);
}
144
145static bool gen_sha256(DisasContext *ctx, arg_r2 *a, DisasExtend ext,
146                       void (*func)(TCGv_i32, TCGv_i32, int32_t),
147                       int32_t num1, int32_t num2, int32_t num3)
148{
149    TCGv dest = dest_gpr(ctx, a->rd);
150    TCGv src1 = get_gpr(ctx, a->rs1, ext);
151    TCGv_i32 t0 = tcg_temp_new_i32();
152    TCGv_i32 t1 = tcg_temp_new_i32();
153    TCGv_i32 t2 = tcg_temp_new_i32();
154
155    tcg_gen_trunc_tl_i32(t0, src1);
156    tcg_gen_rotri_i32(t1, t0, num1);
157    tcg_gen_rotri_i32(t2, t0, num2);
158    tcg_gen_xor_i32(t1, t1, t2);
159    func(t2, t0, num3);
160    tcg_gen_xor_i32(t1, t1, t2);
161    tcg_gen_ext_i32_tl(dest, t1);
162
163    gen_set_gpr(ctx, a->rd, dest);
164    return true;
165}
166
/* sha256sig0: sigma0 = ror(x,7) ^ ror(x,18) ^ (x >> 3). */
static bool trans_sha256sig0(DisasContext *ctx, arg_sha256sig0 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 7, 18, 3);
}

/* sha256sig1: sigma1 = ror(x,17) ^ ror(x,19) ^ (x >> 10). */
static bool trans_sha256sig1(DisasContext *ctx, arg_sha256sig1 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 17, 19, 10);
}

/* sha256sum0: Sum0 = ror(x,2) ^ ror(x,13) ^ ror(x,22). */
static bool trans_sha256sum0(DisasContext *ctx, arg_sha256sum0 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 2, 13, 22);
}

/* sha256sum1: Sum1 = ror(x,6) ^ ror(x,11) ^ ror(x,25). */
static bool trans_sha256sum1(DisasContext *ctx, arg_sha256sum1 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 6, 11, 25);
}
190
191static bool gen_sha512_rv32(DisasContext *ctx, arg_r *a, DisasExtend ext,
192                            void (*func1)(TCGv_i64, TCGv_i64, int64_t),
193                            void (*func2)(TCGv_i64, TCGv_i64, int64_t),
194                            int64_t num1, int64_t num2, int64_t num3)
195{
196    TCGv dest = dest_gpr(ctx, a->rd);
197    TCGv src1 = get_gpr(ctx, a->rs1, ext);
198    TCGv src2 = get_gpr(ctx, a->rs2, ext);
199    TCGv_i64 t0 = tcg_temp_new_i64();
200    TCGv_i64 t1 = tcg_temp_new_i64();
201    TCGv_i64 t2 = tcg_temp_new_i64();
202
203    tcg_gen_concat_tl_i64(t0, src1, src2);
204    func1(t1, t0, num1);
205    func2(t2, t0, num2);
206    tcg_gen_xor_i64(t1, t1, t2);
207    tcg_gen_rotri_i64(t2, t0, num3);
208    tcg_gen_xor_i64(t1, t1, t2);
209    tcg_gen_trunc_i64_tl(dest, t1);
210
211    gen_set_gpr(ctx, a->rd, dest);
212    return true;
213}
214
/* sha512sum0r: low half of Sum0 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sum0r(DisasContext *ctx, arg_sha512sum0r *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotli_i64, 25, 30, 28);
}

/* sha512sum1r: low half of Sum1 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sum1r(DisasContext *ctx, arg_sha512sum1r *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotri_i64, 23, 14, 18);
}

/* sha512sig0l: low half of sigma0 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sig0l(DisasContext *ctx, arg_sha512sig0l *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotri_i64,
                           tcg_gen_rotri_i64, 1, 7, 8);
}

/* sha512sig1l: low half of sigma1 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sig1l(DisasContext *ctx, arg_sha512sig1l *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotri_i64, 3, 6, 19);
}
246
247static bool gen_sha512h_rv32(DisasContext *ctx, arg_r *a, DisasExtend ext,
248                             void (*func)(TCGv_i64, TCGv_i64, int64_t),
249                             int64_t num1, int64_t num2, int64_t num3)
250{
251    TCGv dest = dest_gpr(ctx, a->rd);
252    TCGv src1 = get_gpr(ctx, a->rs1, ext);
253    TCGv src2 = get_gpr(ctx, a->rs2, ext);
254    TCGv_i64 t0 = tcg_temp_new_i64();
255    TCGv_i64 t1 = tcg_temp_new_i64();
256    TCGv_i64 t2 = tcg_temp_new_i64();
257
258    tcg_gen_concat_tl_i64(t0, src1, src2);
259    func(t1, t0, num1);
260    tcg_gen_ext32u_i64(t2, t0);
261    tcg_gen_shri_i64(t2, t2, num2);
262    tcg_gen_xor_i64(t1, t1, t2);
263    tcg_gen_rotri_i64(t2, t0, num3);
264    tcg_gen_xor_i64(t1, t1, t2);
265    tcg_gen_trunc_i64_tl(dest, t1);
266
267    gen_set_gpr(ctx, a->rd, dest);
268    return true;
269}
270
/* sha512sig0h: high half of sigma0 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sig0h(DisasContext *ctx, arg_sha512sig0h *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512h_rv32(ctx, a, EXT_NONE, tcg_gen_rotri_i64, 1, 7, 8);
}

/* sha512sig1h: high half of sigma1 over the {rs2,rs1} 64-bit pair (RV32). */
static bool trans_sha512sig1h(DisasContext *ctx, arg_sha512sig1h *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512h_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64, 3, 6, 19);
}
284
285static bool gen_sha512_rv64(DisasContext *ctx, arg_r2 *a, DisasExtend ext,
286                            void (*func)(TCGv_i64, TCGv_i64, int64_t),
287                            int64_t num1, int64_t num2, int64_t num3)
288{
289    TCGv dest = dest_gpr(ctx, a->rd);
290    TCGv src1 = get_gpr(ctx, a->rs1, ext);
291    TCGv_i64 t0 = tcg_temp_new_i64();
292    TCGv_i64 t1 = tcg_temp_new_i64();
293    TCGv_i64 t2 = tcg_temp_new_i64();
294
295    tcg_gen_extu_tl_i64(t0, src1);
296    tcg_gen_rotri_i64(t1, t0, num1);
297    tcg_gen_rotri_i64(t2, t0, num2);
298    tcg_gen_xor_i64(t1, t1, t2);
299    func(t2, t0, num3);
300    tcg_gen_xor_i64(t1, t1, t2);
301    tcg_gen_trunc_i64_tl(dest, t1);
302
303    gen_set_gpr(ctx, a->rd, dest);
304    return true;
305}
306
/* sha512sig0: sigma0 = ror(x,1) ^ ror(x,8) ^ (x >> 7) (RV64). */
static bool trans_sha512sig0(DisasContext *ctx, arg_sha512sig0 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv64(ctx, a, EXT_NONE, tcg_gen_shri_i64, 1, 8, 7);
}

/* sha512sig1: sigma1 = ror(x,19) ^ ror(x,61) ^ (x >> 6) (RV64). */
static bool trans_sha512sig1(DisasContext *ctx, arg_sha512sig1 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv64(ctx, a, EXT_NONE, tcg_gen_shri_i64, 19, 61, 6);
}

/* sha512sum0: Sum0 = ror(x,28) ^ ror(x,34) ^ ror(x,39) (RV64). */
static bool trans_sha512sum0(DisasContext *ctx, arg_sha512sum0 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv64(ctx, a, EXT_NONE, tcg_gen_rotri_i64, 28, 34, 39);
}

/* sha512sum1: Sum1 = ror(x,14) ^ ror(x,18) ^ ror(x,41) (RV64). */
static bool trans_sha512sum1(DisasContext *ctx, arg_sha512sum1 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv64(ctx, a, EXT_NONE, tcg_gen_rotri_i64, 14, 18, 41);
}
334
335/* SM3 */
336static bool gen_sm3(DisasContext *ctx, arg_r2 *a, int32_t b, int32_t c)
337{
338    TCGv dest = dest_gpr(ctx, a->rd);
339    TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE);
340    TCGv_i32 t0 = tcg_temp_new_i32();
341    TCGv_i32 t1 = tcg_temp_new_i32();
342
343    tcg_gen_trunc_tl_i32(t0, src1);
344    tcg_gen_rotli_i32(t1, t0, b);
345    tcg_gen_xor_i32(t1, t0, t1);
346    tcg_gen_rotli_i32(t0, t0, c);
347    tcg_gen_xor_i32(t1, t1, t0);
348    tcg_gen_ext_i32_tl(dest, t1);
349    gen_set_gpr(ctx, a->rd, dest);
350    return true;
351}
352
/* sm3p0: P0(x) = x ^ rol(x,9) ^ rol(x,17) (Zksh). */
static bool trans_sm3p0(DisasContext *ctx, arg_sm3p0 *a)
{
    REQUIRE_ZKSH(ctx);
    return gen_sm3(ctx, a, 9, 17);
}

/* sm3p1: P1(x) = x ^ rol(x,15) ^ rol(x,23) (Zksh). */
static bool trans_sm3p1(DisasContext *ctx, arg_sm3p1 *a)
{
    REQUIRE_ZKSH(ctx);
    return gen_sm3(ctx, a, 15, 23);
}
364
/* SM4 */
/* sm4ed: SM4 encrypt/decrypt round on one selected byte (Zksed). */
static bool trans_sm4ed(DisasContext *ctx, arg_sm4ed *a)
{
    REQUIRE_ZKSED(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_sm4ed);
}

/* sm4ks: SM4 key-schedule step on one selected byte (Zksed). */
static bool trans_sm4ks(DisasContext *ctx, arg_sm4ks *a)
{
    REQUIRE_ZKSED(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_sm4ks);
}
377