/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (C) 2023 Rivos Inc. */

#include <linux/linkage.h>
#include <asm/asm.h>

	.text

/* void __riscv_copy_words_unaligned(void *, const void *, size_t) */
/* Performs a memcpy without aligning buffers, using word loads and stores. */
/* Note: The size is truncated to a multiple of 8 * SZREG */
SYM_FUNC_START(__riscv_copy_words_unaligned)
	andi  a4, a2, ~((8*SZREG)-1)
	beqz  a4, 2f
	add   a3, a1, a4
1:
	REG_L a4,       0(a1)
	REG_L a5,   SZREG(a1)
	REG_L a6, 2*SZREG(a1)
	REG_L a7, 3*SZREG(a1)
	REG_L t0, 4*SZREG(a1)
	REG_L t1, 5*SZREG(a1)
	REG_L t2, 6*SZREG(a1)
	REG_L t3, 7*SZREG(a1)
	REG_S a4,       0(a0)
	REG_S a5,   SZREG(a0)
	REG_S a6, 2*SZREG(a0)
	REG_S a7, 3*SZREG(a0)
	REG_S t0, 4*SZREG(a0)
	REG_S t1, 5*SZREG(a0)
	REG_S t2, 6*SZREG(a0)
	REG_S t3, 7*SZREG(a0)
	addi  a0, a0, 8*SZREG
	addi  a1, a1, 8*SZREG
	bltu  a1, a3, 1b

2:
	ret
SYM_FUNC_END(__riscv_copy_words_unaligned)
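
/*
 * Illustrative C-level sketch of the word-copy loop above.  This is a
 * sketch only: the helper name copy_words_unaligned_c() is hypothetical,
 * SZREG is assumed to equal sizeof(unsigned long), and C's alignment
 * rules are deliberately ignored, since exercising unaligned accesses is
 * the point of this routine.  Each iteration performs eight word loads
 * followed by eight word stores, mirroring the unrolled loop.
 *
 *	static void copy_words_unaligned_c(void *dst, const void *src,
 *					   size_t size)
 *	{
 *		unsigned long *d = dst;
 *		const unsigned long *s = src;
 *		size_t n = size & ~(8 * sizeof(unsigned long) - 1);
 *		size_t i, j;
 *
 *		for (i = 0; i < n / sizeof(unsigned long); i += 8) {
 *			unsigned long t[8];
 *
 *			for (j = 0; j < 8; j++)
 *				t[j] = s[i + j];
 *			for (j = 0; j < 8; j++)
 *				d[i + j] = t[j];
 *		}
 *	}
 */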

/* void __riscv_copy_bytes_unaligned(void *, const void *, size_t) */
/* Performs a memcpy without aligning buffers, using only byte accesses. */
/* Note: The size is truncated to a multiple of 8 */
SYM_FUNC_START(__riscv_copy_bytes_unaligned)
	andi a4, a2, ~(8-1)
	beqz a4, 2f
	add  a3, a1, a4
1:
	lb   a4, 0(a1)
	lb   a5, 1(a1)
	lb   a6, 2(a1)
	lb   a7, 3(a1)
	lb   t0, 4(a1)
	lb   t1, 5(a1)
	lb   t2, 6(a1)
	lb   t3, 7(a1)
	sb   a4, 0(a0)
	sb   a5, 1(a0)
	sb   a6, 2(a0)
	sb   a7, 3(a0)
	sb   t0, 4(a0)
	sb   t1, 5(a0)
	sb   t2, 6(a0)
	sb   t3, 7(a0)
	addi a0, a0, 8
	addi a1, a1, 8
	bltu a1, a3, 1b

2:
	ret
SYM_FUNC_END(__riscv_copy_bytes_unaligned)
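
/*
 * Illustrative C-level sketch of the byte-copy loop above; the helper
 * name copy_bytes_unaligned_c() is hypothetical and not part of the
 * kernel.  As in the assembly, the size is rounded down to a multiple
 * of 8 and each iteration does eight byte loads followed by eight byte
 * stores.
 *
 *	static void copy_bytes_unaligned_c(void *dst, const void *src,
 *					   size_t size)
 *	{
 *		unsigned char *d = dst;
 *		const unsigned char *s = src;
 *		size_t n = size & ~(size_t)7;
 *		size_t i, j;
 *
 *		for (i = 0; i < n; i += 8) {
 *			unsigned char t[8];
 *
 *			for (j = 0; j < 8; j++)
 *				t[j] = s[i + j];
 *			for (j = 0; j < 8; j++)
 *				d[i + j] = t[j];
 *		}
 *	}
 */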