/* xref: /openbsd/sys/lib/libkern/arch/sh/movstr_i4.S (revision cecf84d4) */
/*	$NetBSD: movstr_i4.S,v 1.5 2006/05/22 21:34:08 uwe Exp $	*/

/*-
 * Copyright (C) 2002 SHIMIZU Ryo.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

31
/*
 * __movstr_i4_odd: gcc block-move millicode helper (see the movmem
 * aliases at the bottom of this file).
 *
 * In:    r4 = destination, r5 = source, r6 = loop count
 *        (the loop body always runs at least once, so r6 >= 1)
 * Does:  copies 2*r6 + 3 longwords from @r5 to @r4 — an odd total,
 *        hence the "_odd" name.  Two longwords per loop pass, plus a
 *        3-longword tail after the loop exits.
 * Out:   r5 advanced past the copied source; r0-r2, r4, r6 and the
 *        T bit are clobbered.
 *
 * NOTE(review): destination/source are accessed with mov.l, so both
 * are presumably longword-aligned — callers must guarantee this.
 * SH delay slots: the instruction after bf/s and after rts executes
 * before the branch/return takes effect.
 */
NENTRY(__movstr_i4_odd)
	add	#-8,r4		/* pre-bias dst; the loop re-adds 8 each pass */
	nop			/* presumably loop-head alignment padding -- TODO confirm */
odd_loop:
	mov.l	@r5+,r0		/* fetch two source longwords ... */
	add	#8,r4		/* ... while advancing the (biased) dst */
	mov.l	@r5+,r1
	dt	r6		/* r6--; T = (r6 == 0) */
	mov.l	r0,@(0,r4)
	bf/s	odd_loop	/* loop while r6 != 0 ... */
	 mov.l	r1,@(4,r4)	/* ... delay slot: second store runs either way */

	/* tail: copy the final 3 longwords (total count becomes odd) */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	@r5+,r2
	mov.l	r0,@(8,r4)
	mov.l	r1,@(12,r4)
	rts
	 mov.l	r2,@(16,r4)	/* delay slot: final store before return */
51
52
/*
 * __movstr_i4_even: gcc block-move millicode helper (see the movmem
 * aliases at the bottom of this file).
 *
 * In:    r4 = destination, r5 = source, r6 = loop count
 *        (the loop body always runs at least once, so r6 >= 1)
 * Does:  copies 2*r6 + 2 longwords from @r5 to @r4 — an even total,
 *        hence the "_even" name.  Two longwords per loop pass, plus a
 *        2-longword tail after the loop exits.
 * Out:   r5 advanced past the copied source; r0, r1, r4, r6 and the
 *        T bit are clobbered.
 *
 * NOTE(review): destination/source are accessed with mov.l, so both
 * are presumably longword-aligned — callers must guarantee this.
 * SH delay slots: the instruction after bf/s and after rts executes
 * before the branch/return takes effect.
 */
NENTRY(__movstr_i4_even)
	add	#-8,r4		/* pre-bias dst; the loop re-adds 8 each pass */
	nop			/* presumably loop-head alignment padding -- TODO confirm */
even_loop:
	mov.l	@r5+,r0		/* fetch two source longwords ... */
	add	#8,r4		/* ... while advancing the (biased) dst */
	mov.l	@r5+,r1
	dt	r6		/* r6--; T = (r6 == 0) */
	mov.l	r0,@(0,r4)
	bf/s	even_loop	/* loop while r6 != 0 ... */
	 mov.l	r1,@(4,r4)	/* ... delay slot: second store runs either way */

	/* tail: copy the final 2 longwords (total count stays even) */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	r0,@(8,r4)
	rts
	 mov.l	r1,@(12,r4)	/* delay slot: final store before return */
70
71
/*
 * gcc 4 and later emit calls to __movmem_i4_{odd,even}; older gcc
 * used the __movstr_i4_* names.  Export both names for the same code.
 */
STRONG_ALIAS(__movmem_i4_odd, __movstr_i4_odd)
STRONG_ALIAS(__movmem_i4_even, __movstr_i4_even)
75