/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

	.arch		armv8-a+crypto

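/*
 * __aes_ce_encrypt() - encrypt a single 16-byte block using the ARMv8
 *                      Crypto Extensions
 *
 * The register usage below suggests the following calling convention (a
 * sketch inferred from this file; the authoritative asmlinkage prototype
 * lives in the C glue code):
 *
 *   x0 - expanded round key schedule, as an array of 32-bit words
 *   x1 - output (ciphertext) block, 16 bytes
 *   x2 - input (plaintext) block, 16 bytes
 *   w3 - number of rounds (10, 12 or 14 for AES-128/192/256)
 */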
SYM_FUNC_START(__aes_ce_encrypt)
	sub		w3, w3, #2		/* rounds - 2 */
	ld1		{v0.16b}, [x2]		/* load plaintext block */
	ld1		{v1.4s}, [x0], #16	/* load first round key */
	cmp		w3, #10
	bmi		0f			/* AES-128: 10 rounds */
	bne		3f			/* AES-256: 14 rounds */
	mov		v3.16b, v1.16b		/* AES-192: 12 rounds */
	b		2f
0:	mov		v2.16b, v1.16b
	ld1		{v3.4s}, [x0], #16
1:	aese		v0.16b, v2.16b		/* AddRoundKey+SubBytes+ShiftRows */
	aesmc		v0.16b, v0.16b		/* MixColumns */
2:	ld1		{v1.4s}, [x0], #16	/* load next round key */
	aese		v0.16b, v3.16b
	aesmc		v0.16b, v0.16b
3:	ld1		{v2.4s}, [x0], #16	/* load next round key */
	subs		w3, w3, #3		/* 3 rounds per iteration */
	aese		v0.16b, v1.16b
	aesmc		v0.16b, v0.16b
	ld1		{v3.4s}, [x0], #16	/* load next round key */
	bpl		1b
	aese		v0.16b, v2.16b		/* final round, no MixColumns */
	eor		v0.16b, v0.16b, v3.16b	/* add the last round key */
	st1		{v0.16b}, [x1]		/* store ciphertext block */
	ret
SYM_FUNC_END(__aes_ce_encrypt)

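/*
 * __aes_ce_decrypt() - decrypt a single 16-byte block; same structure as
 *                      __aes_ce_encrypt above, using aesd/aesimc instead of
 *                      aese/aesmc
 *
 * Since aesd implements a round of the AES Equivalent Inverse Cipher, the
 * schedule passed in x0 is presumably the decryption key schedule, i.e. the
 * encryption round keys in reverse order with InvMixColumns applied to all
 * but the first and last (see __aes_ce_invert below); the key expansion
 * itself is not part of this file.
 */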
SYM_FUNC_START(__aes_ce_decrypt)
	sub		w3, w3, #2		/* rounds - 2 */
	ld1		{v0.16b}, [x2]		/* load ciphertext block */
	ld1		{v1.4s}, [x0], #16	/* load first round key */
	cmp		w3, #10
	bmi		0f			/* AES-128: 10 rounds */
	bne		3f			/* AES-256: 14 rounds */
	mov		v3.16b, v1.16b		/* AES-192: 12 rounds */
	b		2f
0:	mov		v2.16b, v1.16b
	ld1		{v3.4s}, [x0], #16
1:	aesd		v0.16b, v2.16b		/* AddRoundKey+InvSubBytes+InvShiftRows */
	aesimc		v0.16b, v0.16b		/* InvMixColumns */
2:	ld1		{v1.4s}, [x0], #16	/* load next round key */
	aesd		v0.16b, v3.16b
	aesimc		v0.16b, v0.16b
3:	ld1		{v2.4s}, [x0], #16	/* load next round key */
	subs		w3, w3, #3		/* 3 rounds per iteration */
	aesd		v0.16b, v1.16b
	aesimc		v0.16b, v0.16b
	ld1		{v3.4s}, [x0], #16	/* load next round key */
	bpl		1b
	aesd		v0.16b, v2.16b		/* final round, no InvMixColumns */
	eor		v0.16b, v0.16b, v3.16b	/* add the last round key */
	st1		{v0.16b}, [x1]		/* store plaintext block */
	ret
SYM_FUNC_END(__aes_ce_decrypt)

/*
 * __aes_ce_sub() - use the aese instruction to perform the AES sbox
 *                  substitution on each byte in 'input'
 *
 * Duplicating the input word across all four lanes makes ShiftRows a no-op,
 * so lane 0 of the aese result is simply the sbox applied to each byte.
 */
SYM_FUNC_START(__aes_ce_sub)
	dup		v1.4s, w0		/* replicate w0 in all 32-bit lanes */
	movi		v0.16b, #0		/* zero state */
	aese		v0.16b, v1.16b		/* SubBytes(ShiftRows(0 ^ v1)) */
	umov		w0, v0.s[0]		/* lane 0: sbox of each byte of w0 */
	ret
SYM_FUNC_END(__aes_ce_sub)

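/*
 * A hypothetical C-side sketch of how __aes_ce_sub() might be used for the
 * SubWord step of the AES key schedule (declaration and identifiers below
 * are illustrative assumptions, not taken from this file):
 *
 *	asmlinkage u32 __aes_ce_sub(u32 in);
 *
 *	// next round key word: SubWord(RotWord(prev)) ^ Rcon ^ first word
 *	rko[0] = __aes_ce_sub(ror32(rki[kwords - 1], 8)) ^ rcon[i] ^ rki[0];
 */
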
/*
 * __aes_ce_invert() - use the aesimc instruction to apply the AES
 *                     InvMixColumns transformation to a single round key
 */
SYM_FUNC_START(__aes_ce_invert)
	ld1		{v0.4s}, [x1]		/* load round key from x1 */
	aesimc		v1.16b, v0.16b		/* InvMixColumns */
	st1		{v1.4s}, [x0]		/* store result to x0 */
	ret
SYM_FUNC_END(__aes_ce_invert)

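/*
 * A hypothetical sketch of how __aes_ce_invert() might be used by C key
 * expansion code to build the decryption key schedule for the Equivalent
 * Inverse Cipher: reverse the order of the encryption round keys and apply
 * InvMixColumns to all but the first and last (identifiers below are
 * illustrative assumptions, not taken from this file):
 *
 *	key_dec[0] = key_enc[rounds];
 *	for (i = 1, j = rounds - 1; j > 0; i++, j--)
 *		__aes_ce_invert(&key_dec[i], &key_enc[j]);
 *	key_dec[i] = key_enc[0];
 */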