xref: /linux/arch/loongarch/lib/memset.S (revision dd093fb0)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */

#include <asm/alternative-asm.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/cpu.h>
#include <asm/export.h>
#include <asm/regdef.h>

/* Replicate the low byte of \r0 across all 64 bits of the register */
.macro fill_to_64 r0
	bstrins.d \r0, \r0, 15, 8
	bstrins.d \r0, \r0, 31, 16
	bstrins.d \r0, \r0, 63, 32
.endm
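/*
 * A rough C equivalent of fill_to_64 (an illustrative sketch, not part of
 * the kernel sources): each step doubles the number of bytes holding the
 * fill value, and bstrins.d overwrites the destination field, so only the
 * low byte of the original value survives.
 *
 *	#include <stdint.h>
 *
 *	static inline uint64_t fill_to_64(uint64_t c)
 *	{
 *		c &= 0xff;	// only the low byte matters
 *		c |= c << 8;	// bstrins.d \r0, \r0, 15, 8
 *		c |= c << 16;	// bstrins.d \r0, \r0, 31, 16
 *		c |= c << 32;	// bstrins.d \r0, \r0, 63, 32
 *		return c;
 *	}
 */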

SYM_FUNC_START(memset)
	/*
	 * Some CPUs support hardware unaligned access: the ALTERNATIVE below
	 * branches to the fast variant on CPUs with CPU_FEATURE_UAL and to
	 * the generic byte loop otherwise.
	 */
	ALTERNATIVE	"b __memset_generic", \
			"b __memset_fast", CPU_FEATURE_UAL
SYM_FUNC_END(memset)

EXPORT_SYMBOL(memset)
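/*
 * Conceptually the dispatch behaves like the C sketch below (the helper
 * name cpu_has_ual_feature() is hypothetical; in reality ALTERNATIVE
 * selects the branch once at boot rather than testing the feature on
 * every call):
 *
 *	#include <stddef.h>
 *
 *	void *__memset_generic(void *s, int c, size_t n);
 *	void *__memset_fast(void *s, int c, size_t n);
 *
 *	void *memset(void *s, int c, size_t n)
 *	{
 *		if (cpu_has_ual_feature())	// hardware unaligned access
 *			return __memset_fast(s, c, n);
 *		return __memset_generic(s, c, n);
 *	}
 */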

/*
 * void *__memset_generic(void *s, int c, size_t n)
 *
 * a0: s
 * a1: c
 * a2: n
 */
SYM_FUNC_START(__memset_generic)
	move	a3, a0		/* keep s for the return value */
	beqz	a2, 2f

	/* store the fill byte one byte at a time */
1:	st.b	a1, a0, 0
	addi.d	a0, a0, 1
	addi.d	a2, a2, -1
	bgt	a2, zero, 1b

2:	move	a0, a3
	jr	ra
SYM_FUNC_END(__memset_generic)
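/*
 * Roughly equivalent C (illustrative sketch only):
 *
 *	#include <stddef.h>
 *
 *	void *__memset_generic(void *s, int c, size_t n)
 *	{
 *		unsigned char *p = s;	// a0 walks the buffer, a3 keeps s
 *
 *		while (n--)
 *			*p++ = (unsigned char)c;	// st.b a1, a0, 0
 *		return s;
 *	}
 */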

/*
 * void *__memset_fast(void *s, int c, size_t n)
 *
 * a0: s
 * a1: c
 * a2: n
 */
SYM_FUNC_START(__memset_fast)
	move	a3, a0		/* keep s for the return value */
	beqz	a2, 3f

	ori	a4, zero, 64
	blt	a2, a4, 2f

	/* replicate the fill byte across all 64 bits of a1 */
	fill_to_64 a1

	/* set 64 bytes at a time */
1:	st.d	a1, a0, 0
	st.d	a1, a0, 8
	st.d	a1, a0, 16
	st.d	a1, a0, 24
	st.d	a1, a0, 32
	st.d	a1, a0, 40
	st.d	a1, a0, 48
	st.d	a1, a0, 56

	addi.d	a0, a0, 64
	addi.d	a2, a2, -64
	bge	a2, a4, 1b

	beqz	a2, 3f

	/* set the remaining bytes */
2:	st.b	a1, a0, 0
	addi.d	a0, a0, 1
	addi.d	a2, a2, -1
	bgt	a2, zero, 2b

	/* return */
3:	move	a0, a3
	jr	ra
SYM_FUNC_END(__memset_fast)

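/*
 * Roughly equivalent C (illustrative sketch only; the unaligned 64-bit
 * stores mirror the st.d instructions and are acceptable here only because
 * this path runs on CPUs with hardware unaligned access, CPU_FEATURE_UAL):
 *
 *	#include <stddef.h>
 *	#include <stdint.h>
 *
 *	void *__memset_fast(void *s, int c, size_t n)
 *	{
 *		unsigned char *p = s;
 *
 *		if (n >= 64) {
 *			uint64_t v = (c & 0xff) * 0x0101010101010101ULL;	// fill_to_64
 *
 *			do {	// eight st.d stores = one 64-byte block
 *				for (int i = 0; i < 8; i++)
 *					((uint64_t *)p)[i] = v;
 *				p += 64;
 *				n -= 64;
 *			} while (n >= 64);
 *		}
 *
 *		while (n--)	// 0..63 trailing bytes (label 2: above)
 *			*p++ = (unsigned char)c;
 *		return s;
 *	}
 */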