/*	$NetBSD: movstr_i4.S,v 1.1 2002/09/05 08:35:16 msaitoh Exp $	*/

/*-
 * Copyright (C) 2002 SHIMIZU Ryo. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Out-of-line block-move ("movstr") helpers, SuperH (SH) assembly.
 *
 * NOTE(review): presumably emitted by gcc's SH movstrsi expansion for
 * word-aligned block copies -- confirm against gcc/config/sh.
 *
 * In:   r4 = destination (word-aligned -- assumed, mov.l requires it)
 *       r5 = source (word-aligned -- assumed)
 *       r6 = number of 8-byte loop iterations; code assumes r6 >= 1
 *            (the dt/bf loop always executes at least once)
 * Out:  r4, r5, r6 clobbered (r5 ends just past the source region, r6 = 0)
 * Clobbers: r0, r1 (plus r2 in the _odd variant), T bit
 *
 * Each routine copies 8*r6 bytes in the loop, then an unrolled tail:
 *   __movstr_i4_odd:  +12 bytes -> total 4*(2*r6 + 3)  (odd word count)
 *   __movstr_i4_even:  +8 bytes -> total 4*(2*r6 + 2)  (even word count)
 */

#include <machine/asm.h>

/* Copy an odd number of 32-bit words: 8*r6 + 12 bytes from @r5 to @r4. */
ENTRY(__movstr_i4_odd)
	add	#-8,r4			/* pre-bias dst: loop adds 8 before storing */
	nop				/* presumably aligns the loop head -- verify */
odd_loop:
	mov.l	@r5+,r0			/* load two source words, post-increment r5 */
	add	#8,r4			/* advance dst for this iteration */
	mov.l	@r5+,r1
	dt	r6			/* r6--; T = (r6 == 0)  (SH-2 and later insn) */
	mov.l	r0,@(0,r4)
	bf/s	odd_loop		/* loop while r6 != 0 (T clear) */
	mov.l	r1,@(4,r4)		/* executed in the branch delay slot */

	/* Unrolled tail: three more words (stores end at r4+20). */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	@r5+,r2
	mov.l	r0,@(8,r4)
	mov.l	r1,@(12,r4)
	rts				/* return; final store in the delay slot */
	mov.l	r2,@(16,r4)



/* Copy an even number of 32-bit words: 8*r6 + 8 bytes from @r5 to @r4. */
ENTRY(__movstr_i4_even)
	add	#-8,r4			/* pre-bias dst: loop adds 8 before storing */
	nop				/* presumably aligns the loop head -- verify */
even_loop:
	mov.l	@r5+,r0			/* load two source words, post-increment r5 */
	add	#8,r4			/* advance dst for this iteration */
	mov.l	@r5+,r1
	dt	r6			/* r6--; T = (r6 == 0)  (SH-2 and later insn) */
	mov.l	r0,@(0,r4)
	bf/s	even_loop		/* loop while r6 != 0 (T clear) */
	mov.l	r1,@(4,r4)		/* executed in the branch delay slot */

	/* Unrolled tail: two more words (stores end at r4+16). */
	mov.l	@r5+,r0
	mov.l	@r5+,r1
	mov.l	r0,@(8,r4)
	rts				/* return; final store in the delay slot */
	mov.l	r1,@(12,r4)