/*	$NetBSD: asm.h,v 1.11 2005/12/24 20:07:37 perry Exp $ */

/*
 * Copyright (c) 1992, 1993
 *	The Regents of the University of California.  All rights reserved.
 *
 * This software was developed by the Computer Systems Engineering group
 * at Lawrence Berkeley Laboratory under DARPA contract BG 91-66 and
 * contributed to Berkeley.
 *
 * All advertising materials mentioning features or use of this software
 * must display the following acknowledgement:
 *	This product includes software developed by the University of
 *	California, Lawrence Berkeley Laboratory.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *	@(#)asm.h	8.1 (Berkeley) 6/11/93
 */

/*
 * GCC __asm constructs for accessing SPARC alternate address spaces,
 * atomic memory instructions, and ancillary state registers.
 */

/*
 * ``Routines'' to load and store from/to alternate address space.
 * The location can be a variable; the ASI value (address space
 * identifier) must be a constant.
 *
 * N.B.: You can put as many special functions here as you like, since
 * they cost no kernel space or time if they are not used.
 *
 * These were static inline functions, but gcc screws up the constraints
 * on the address space identifiers (the "n"umeric value part) because
 * it inlines too late, so we have to use the funny valued-macro
 * (statement-expression) syntax instead.
 */

/* load unsigned byte from alternate address space */
#define	lduba(loc, asi) ({ \
	register int _lduba_v; \
	__asm volatile("lduba [%1]%2,%0" : "=r" (_lduba_v) : \
	    "r" ((int)(loc)), "n" (asi)); \
	_lduba_v; \
})

/* load unsigned half-word from alternate address space */
#define	lduha(loc, asi) ({ \
	register int _lduha_v; \
	__asm volatile("lduha [%1]%2,%0" : "=r" (_lduha_v) : \
	    "r" ((int)(loc)), "n" (asi)); \
	_lduha_v; \
})

/* load int from alternate address space */
#define	lda(loc, asi) ({ \
	register int _lda_v; \
	__asm volatile("lda [%1]%2,%0" : "=r" (_lda_v) : \
	    "r" ((int)(loc)), "n" (asi)); \
	_lda_v; \
})

/* store byte to alternate address space */
#define	stba(loc, asi, value) ({ \
	__asm volatile("stba %0,[%1]%2" : : \
	    "r" ((int)(value)), "r" ((int)(loc)), "n" (asi)); \
})

/* store half-word to alternate address space */
#define	stha(loc, asi, value) ({ \
	__asm volatile("stha %0,[%1]%2" : : \
	    "r" ((int)(value)), "r" ((int)(loc)), "n" (asi)); \
})

/* store int to alternate address space */
#define	sta(loc, asi, value) ({ \
	__asm volatile("sta %0,[%1]%2" : : \
	    "r" ((int)(value)), "r" ((int)(loc)), "n" (asi)); \
})
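
/*
 * Example (illustrative sketch, not compiled): loads and stores
 * through a constant ASI.  ASI_CONTROL (the sun4/sun4c control space,
 * assumed here to come from <machine/ctlreg.h>) is only an
 * illustration; any compile-time-constant ASI satisfies the "n"
 * constraint, but a variable ASI will not assemble.
 */
#if 0
	int v;

	v = lda(0x0, ASI_CONTROL);	/* load word from control space */
	sta(0x0, ASI_CONTROL, v);	/* write it back */
#endif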

/* load 64-bit int from alternate address space */
#define	ldda(loc, asi) ({ \
	register long long _ldda_v; \
	__asm volatile("ldda [%1]%2,%0" : "=r" (_ldda_v) : \
	    "r" ((int)(loc)), "n" (asi)); \
	_ldda_v; \
})

/* store 64-bit int to alternate address space */
#define	stda(loc, asi, value) ({ \
	__asm volatile("stda %0,[%1]%2" : : \
	    "r" ((long long)(value)), "r" ((int)(loc)), "n" (asi)); \
})
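
/*
 * Example (illustrative sketch, not compiled): ldda/stda move a
 * doubleword, so the SPARC V8 architecture requires the address to be
 * 8-byte aligned and the data to sit in an even/odd register pair;
 * declaring the value 'long long' makes gcc allocate such a pair.
 * ASI_CONTROL is the same illustrative constant as above.
 */
#if 0
	long long d;

	d = ldda(0x0, ASI_CONTROL);	/* address must be 8-byte aligned */
	stda(0x0, ASI_CONTROL, d);
#endif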

/* atomic swap of a word between a register and memory */
#define	swap(loc, val) ({ \
	__asm volatile("swap [%2],%0" : "=&r" (val) : "0" (val), "r" (loc) : \
	    "memory"); \
})
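
/*
 * Example (illustrative sketch, not compiled): swap exchanges a
 * register with a word in memory in one atomic step, e.g. grabbing
 * and clearing a shared word.  `pending' is a hypothetical variable.
 */
#if 0
	volatile int pending;	/* hypothetical shared word */
	int mine = 0;

	swap(&pending, mine);	/* mine <- old pending, pending <- 0 */
#endif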

/*
 * atomic load/store of a byte in memory: read the byte's old value
 * and set the byte to 0xff in a single atomic operation
 */
#define	ldstub(loc) ({ \
	int _v; \
	__asm volatile("ldstub [%1],%0" : "=r" (_v) : "r" (loc) : "memory"); \
	_v; \
})
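
/*
 * Example (illustrative sketch, not compiled): ldstub is the classic
 * SPARC test-and-set primitive.  A minimal spin-lock acquire, assuming
 * a hypothetical lock byte where 0 means free and 0xff means held:
 */
#if 0
	volatile unsigned char lk;	/* hypothetical lock byte */

	while (ldstub(&lk) != 0)	/* got the lock iff old value was 0 */
		while (lk != 0)
			continue;	/* spin read-only until it looks free */
#endif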

/*
 * read ancillary state register
 * (two macro levels so that an `asr' argument which is itself a macro
 * is expanded before being pasted into the instruction)
 */
#define	rdasr(asr) _rdasr(asr)
#define	_rdasr(asr) ({ \
	register int _rdasr_v; \
	__asm volatile("rd %%asr" #asr ",%0" : "=r" (_rdasr_v)); \
	_rdasr_v; \
})

/* write ancillary state register (same two-level expansion trick) */
#define	wrasr(value, asr) _wrasr(value, asr)
#define	_wrasr(value, asr) ({ \
	__asm volatile("wr %0,%%asr" #asr : : "r" ((int)(value))); \
})
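
/*
 * Example (illustrative sketch, not compiled): the asr argument must
 * be a plain number, since it is pasted into the mnemonic.  ASRs 16-31
 * are implementation-dependent on SPARC V8; %asr17 below is purely
 * illustrative.
 */
#if 0
	int v;

	v = rdasr(17);		/* expands to "rd %asr17,%0" */
	wrasr(v | 1, 17);	/* expands to "wr %0,%asr17" */
#endif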
141