/* xref: /netbsd/sys/lib/libkern/arch/sh3/movstr_i4.S (revision 6550d01e) */
/*	$NetBSD: movstr_i4.S,v 1.6 2009/01/07 22:15:18 uwe Exp $	*/

/*-
 * Copyright (C) 2002 SHIMIZU Ryo.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>


#ifdef __ELF__
	/*
	 * These compiler-support helpers are internal: give them hidden
	 * ELF visibility so they are resolved locally and are not
	 * exported from shared objects.
	 */
	.hidden __movstr_i4_odd, __movmem_i4_odd
	.hidden __movstr_i4_even, __movmem_i4_even
#endif

/*
 * __movstr_i4_odd: gcc block-move (movstr/movmem) helper.
 *
 * In:	r4 = destination, r5 = source
 *	r6 = number of 8-byte (2-longword) loop iterations
 * Copies r6*8 bytes in the unrolled loop, then a 3-longword (12-byte)
 * tail -- an odd total longword count of 2*r6 + 3, hence the name.
 * Clobbers: r0-r2, r4-r6, T bit.
 *
 * NOTE(review): all accesses are mov.l, so both pointers must be
 * longword-aligned and r6 is presumably >= 1 -- guaranteed by the
 * compiler's expansion; confirm against gcc's SH machine description.
 */
NENTRY(__movstr_i4_odd)
	add	#-8,r4		/* pre-bias dest: loop adds 8 back first */
	nop			/* padding; presumably aligns loop head */
odd_loop:
	mov.l	@r5+,r0		/* fetch longword pair from source */
	add	#8,r4		/* advance dest to this pair's slot */
	mov.l	@r5+,r1
	dt	r6		/* r6--; T=1 once the count hits 0 */
	mov.l	r0,@(0,r4)
	bf/s	odd_loop	/* loop while T==0 (iterations remain)... */
	 mov.l	r1,@(4,r4)	/* ...delay slot: store second longword */

	/* tail: final 3 longwords, just past the last pair written */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	@r5+,r2
	mov.l	r0,@(8,r4)
	mov.l	r1,@(12,r4)
	rts
	 mov.l	r2,@(16,r4)	/* delay slot: last store before return */

/*
 * __movstr_i4_even: gcc block-move (movstr/movmem) helper.
 *
 * In:	r4 = destination, r5 = source
 *	r6 = number of 8-byte (2-longword) loop iterations
 * Copies r6*8 bytes in the unrolled loop, then a 2-longword (8-byte)
 * tail -- an even total longword count of 2*r6 + 2, hence the name.
 * Clobbers: r0, r1, r4-r6, T bit.
 *
 * NOTE(review): longword accesses only; both pointers must be
 * longword-aligned and r6 presumably >= 1 -- confirm against gcc's
 * SH machine description.
 */
NENTRY(__movstr_i4_even)
	add	#-8,r4		/* pre-bias dest: loop adds 8 back first */
	nop			/* padding; presumably aligns loop head */
even_loop:
	mov.l	@r5+,r0		/* fetch longword pair from source */
	add	#8,r4		/* advance dest to this pair's slot */
	mov.l	@r5+,r1
	dt	r6		/* r6--; T=1 once the count hits 0 */
	mov.l	r0,@(0,r4)
	bf/s	even_loop	/* loop while T==0 (iterations remain)... */
	 mov.l	r1,@(4,r4)	/* ...delay slot: store second longword */

	/* tail: final 2 longwords, just past the last pair written */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	r0,@(8,r4)
	rts
	 mov.l	r1,@(12,r4)	/* delay slot: last store before return */

/*
 * gcc4 emits calls to the movmem-named helpers, older versions use
 * movstr; alias the new names to the same code.
 */
STRONG_ALIAS(__movmem_i4_odd, __movstr_i4_odd)
STRONG_ALIAS(__movmem_i4_even, __movstr_i4_even)