/* $NetBSD: atomic_add_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */

/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

/*
 * 8-bit atomic add/subtract, built on the ARMv6+ exclusive-monitor byte
 * instructions (ldrexb/strexb).  The subtract entry points simply negate
 * the addend and fall through into the corresponding add entry point.
 */
#ifdef _ARM_ARCH_6

/*
 * _atomic_sub_8(ptr, val) / _atomic_add_8(ptr, val)
 *
 * In:   r0 = address of the byte, r1 = amount to add (negated for sub)
 * Out:  r0 = OLD value of the byte (fetch-and-add semantics; this is
 *       what the __sync_fetch_and_* aliases below require)
 * Uses: r2 (strexb status), r3 (new value), ip (saved pointer)
 */
ENTRY_NP(_atomic_sub_8)
	negs	r1, r1			/* subtract = add of the negation */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8)
	mov	ip, r0			/* move ptr; r0 will hold old value */
1:	ldrexb	r0, [ip]		/* load old value (exclusive) */
	adds	r3, r0, r1		/* calculate new value */
	strexb	r2, r3, [ip]		/* try to store (r2 = 0 on success) */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	/* pre-v7 barrier: CP15 c7/c10/5 op; source register value ignored */
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_add_8)
END(_atomic_sub_8)

ATOMIC_OP_ALIAS(atomic_add_8,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_char,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_uchar,_atomic_add_8)
STRONG_ALIAS(__sync_fetch_and_add_1,_atomic_add_8)
STRONG_ALIAS(_atomic_add_char,_atomic_add_8)
STRONG_ALIAS(_atomic_add_uchar,_atomic_add_8)

ATOMIC_OP_ALIAS(atomic_sub_8,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_char,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_uchar,_atomic_sub_8)
STRONG_ALIAS(__sync_fetch_and_sub_1,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_char,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_uchar,_atomic_sub_8)

/*
 * _atomic_sub_8_nv(ptr, val) / _atomic_add_8_nv(ptr, val)
 *
 * Same as above but returns the NEW value ("_nv"), as required by the
 * __sync_*_and_fetch aliases below.  The add is therefore done directly
 * into r0 so no separate scratch register is needed for the result.
 */
ENTRY_NP(_atomic_sub_8_nv)
	negs	r1, r1			/* subtract = add of the negation */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexb	r0, [ip]		/* load old value (exclusive) */
	adds	r0, r0, r1		/* calculate new value (return value) */
	strexb	r2, r0, [ip]		/* try to store (r2 = 0 on success) */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	/* pre-v7 barrier: CP15 c7/c10/5 op; source register value ignored */
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_add_8_nv)
END(_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_add_8_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_char_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_uchar_nv,_atomic_add_8_nv)
STRONG_ALIAS(__sync_add_and_fetch_1,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_char_nv,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_uchar_nv,_atomic_add_8_nv)

ATOMIC_OP_ALIAS(atomic_sub_8_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_char_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_uchar_nv,_atomic_sub_8_nv)
STRONG_ALIAS(__sync_sub_and_fetch_1,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_char_nv,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_uchar_nv,_atomic_sub_8_nv)
#endif /* _ARM_ARCH_6 */