/* asm.h -- CR16 architecture intrinsic functions
 *
 * Copyright (c) 2012 National Semiconductor Corporation
 *
 * The authors hereby grant permission to use, copy, modify, distribute,
 * and license this software and its documentation for any purpose, provided
 * that existing copyright notices are retained in all copies and that this
 * notice is included verbatim in any distributions. No written agreement,
 * license, or royalty fee is required for any of the authorized uses.
 * Modifications to this software may be copyrighted by their authors
 * and need not follow the licensing terms described here, provided that
 * the new terms are clearly indicated on the first page of each file where
 * they apply.
 */

#ifndef	_ASM
#define _ASM

/* Note that immediate input values are not checked for validity. It is
   the user's responsibility to use the intrinsic functions with appropriate
   immediate values. */

/* Addition Instructions */
#define _addb_(src, dest)	__asm__("addb %1, %0" : "=r" (dest) : \
					"ri" ((unsigned char)src), "0" (dest) : "cc")
#define _addub_(src, dest)	__asm__("addub	%1, %0" : "=r" (dest) : \
					"ri" ((unsigned char)src), "0" (dest) : "cc")
#define _addw_(src, dest)	__asm__("addw %1, %0" : "=r" (dest) : \
					"ri" ((unsigned short)src), "0" (dest) : "cc")
#define _adduw_(src, dest)	__asm__("adduw	%1, %0" : "=r" (dest) : \
					"ri" ((unsigned short)src), "0" (dest) : "cc")
#define _addd_(src, dest)	__asm__("addd %1, %0" : "=r" (dest) : \
					"ri" ((unsigned long)src), "0" (dest) : "cc")
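
/* Example (illustrative sketch; the variable below is hypothetical):

     unsigned short total = 10;
     _addw_(5, total);      16-bit add: total becomes 15 and the PSR
                            flags are updated ("cc" clobber)

   The immediate operand is passed through unchecked, so it must fit
   the addw immediate encoding.  */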

/* Add with Carry */
#define _addcb_(src, dest)	__asm__("addcb	%1, %0" : "=r" (dest) : \
					"ri" ((unsigned char)src), "0" (dest) : "cc")
#define _addcw_(src, dest)	__asm__("addcw	%1, %0" : "=r" (dest) : \
					"ri" ((unsigned short)src), "0" (dest) : "cc")

/* Bitwise Logical AND */
#define _andb_(src, dest)	__asm__("andb %1,%0" : "=r" (dest) : \
					"ri" ((unsigned char)src) , "0" (dest))
#define _andw_(src, dest)	__asm__("andw %1,%0" : "=r" (dest) : \
					"ri" ((unsigned short)src) , "0" (dest))
#define _andd_(src, dest)	__asm__("andd %1,%0" : "=r" (dest) : \
					"ri" ((unsigned long)src) , "0" (dest))
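
/* Example (illustrative sketch; the variable and mask are hypothetical):

     unsigned short flags = 0x00f3;
     _andw_(0x00ff, flags);     keeps only the low byte of flags

   The same pattern applies to the or and xor intrinsics below.  */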

/* Arithmetic Shift Instructions */
#define _ashub_(count, dest)	__asm__("ashub %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )
#define _ashuw_(count, dest)	__asm__("ashuw %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )
#define _ashud_(count, dest)	__asm__("ashud %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )
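
/* Example (illustrative sketch; assumes the CR16 convention that the
   sign of the count selects the shift direction):

     short value = -8;
     _ashuw_(-2, value);    arithmetic shift of value by two positions  */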

/* cbit (clear bit) Instructions */
#define _cbitb_(pos, dest) 	__asm__("cbitb %1,%0" : "=mr" (dest) : \
					"i" ((unsigned char)pos) , "0" (dest) : "cc")
#define _cbitw_(pos, dest) 	__asm__("cbitw %1,%0" : "=mr" (dest) : \
					"i" ((unsigned char)pos) , "0" (dest) : "cc")
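
/* Example (illustrative sketch; the variable is hypothetical, and the
   bit position must be a compile-time constant because of the "i"
   constraint):

     unsigned char ctrl = 0xff;
     _cbitb_(3, ctrl);      clears bit 3, leaving 0xf7  */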

/* Compare Instructions */
#define _cmpb_(src1, src2)	__asm__("cmpb %0,%1" : /* no output */ : \
					"ri" ((unsigned char)src1) , "r" (src2) : "cc")
#define _cmpw_(src1, src2)	__asm__("cmpw %0,%1" : /* no output */ : \
					"ri" ((unsigned short)src1) , "r" (src2) : "cc")
#define _cmpd_(src1, src2)	__asm__("cmpd %0,%1" : /* no output */ : \
					"ri" ((unsigned long)src1) , "r" (src2) : "cc")
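
/* Note: the compare intrinsics only update the PSR condition flags
   ("cc" clobber); no result is visible to C code.  Illustrative use,
   with a hypothetical variable:

     unsigned short limit = 100;
     _cmpw_(42, limit);     sets the PSR flags for a following
                            hand-written conditional branch  */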

/* Disable Interrupts Instructions */
#if !defined (__CR16C__)
#define _di_()		__asm__ volatile ("di\n" :  :  : "cc")
#else
/* On the CR16C architecture a "nop" is required after the "di"
   instruction to make sure the interrupts are indeed disabled.
   For details, refer to the CR16C Programmer's Reference Manual. */
#define _di_()		__asm__ volatile ("di\n\tnop" :  :  : "cc")
#endif
#define _disable_()		_di_()
#define _disable_interrupt_()	_di_()

/* Enable Interrupts Instructions */
#define _ei_()		__asm__ volatile ("ei\n" :  :  : "cc")
#define _enable_()	__asm__ volatile ("ei\n" :  :  : "cc")
#define _enable_interrupt_()	_ei_()
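
/* Example (illustrative sketch of a short critical section; the shared
   counter is hypothetical):

     extern volatile unsigned int shared_counter;

     _di_();                 interrupts masked
     shared_counter++;
     _ei_();                 interrupts re-enabled  */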

/* Enable Interrupts and Wait Instruction */
#define _eiwait_()	__asm__ volatile ("eiwait" :  :  : "cc")

/* excp Instructions */
#define _excp_(vector)	__asm__ volatile ("excp " # vector)

/* lpr and lprd Instructions */
#define _lpr_(procreg, src)	__asm__("lpr\t%0," procreg : \
				  	/* no output */ : "r" (src) : "cc")
#define _lprd_(procregd, src)	__asm__("lprd\t%0," procregd : \
					/* no output */ : "r" (src) : "cc")

/* Left Shift Instructions */
#define _lshb_(count, dest)	__asm__("lshb %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )
#define _lshw_(count, dest)	__asm__("lshw %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )
#define _lshd_(count, dest)	__asm__("lshd %1,%0" : "=r" (dest) : \
					"ri" ((char)count) , "0" (dest) )

/* Load Instructions */
#define _loadb_(base, dest)	__asm__("loadb %1,%0" : "=r" (dest) : \
					"m" (base) , "0" (dest))
#define _loadw_(base, dest)	__asm__("loadw %1,%0" : "=r" (dest) : \
					"m" (base) , "0" (dest))
#define _loadd_(base, dest)	__asm__("loadd %1,%0" : "=r" (dest) : \
					"m" (base) , "0" (dest))
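
/* Example (illustrative sketch; the source object is hypothetical):

     extern volatile unsigned short status_reg;
     unsigned short s = 0;
     _loadw_(status_reg, s);    loads status_reg into s with an
                                explicit loadw instruction  */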

/* Load Multiple Instructions */
#define _loadm_(src, mask) 	__asm__("loadm %0,%1" : /* No output */ : \
					"r" ((unsigned int)src) , "i" (mask))
#define _loadmp_(src, mask) 	__asm__("loadmp %0,%1" : /* No output */ : \
					"r" ((unsigned int)src) , "i" (mask))

/* Multiply Accumulate Instructions */
#define _macsw_(hi, lo, src1, src2)  	__asm__("macsw %1,%0" 				\
						: "=l" (lo), "=h" (hi) 			\
						: "r" ((short)src1) , "r" (src2))
#define _macuw_(hi, lo, src1, src2)  	__asm__("macuw %1,%0" 				\
  						: "=l" (lo), "=h" (hi) 			\
						: "r" ((unsigned short)src1) , "r" (src2))
#define _macqw_(hi, lo, src1, src2)  	__asm__("macqw %1,%0" 				\
  						: "=l" (lo), "=h" (hi) 			\
						: "r" ((short)src1) , "r" (src2))

/* Move Instructions */
#define _movb_(src, dest)  	__asm__("movb %1,%0" : "=r" (dest) : \
					"ri" ((unsigned char)src) , "0" (dest))
#define _movw_(src, dest)  	__asm__("movw %1,%0" : "=r" (dest) : \
					"ri" ((unsigned short)src) , "0" (dest))
#define _movd_(src, dest)  	__asm__("movd %1,%0" : "=r" (dest)  : \
					"ri" ((unsigned int)src) , "0" (dest))
#define _movxb_(src, dest)	__asm__("movxb %1,%0" : "=r" (dest) : \
					"r" (src), "0" (dest) )
#define _movzb_(src, dest)	__asm__("movzb %1,%0" : "=r" (dest) : \
					"r" (src), "0" (dest) )
#define _movxw_(src, dest)	__asm__("movxw %1,%0" : "=r" (dest) : \
					"r" (src), "0" (dest) )
#define _movzw_(src, dest)	__asm__("movzw %1,%0" : "=r" (dest) : \
					"r" (src), "0" (dest) )
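
/* Example (illustrative sketch; the variables are hypothetical):

     unsigned char raw = 0x80;
     short wide = 0;
     _movzb_(raw, wide);    zero-extends the byte: wide == 0x0080
     _movxb_(raw, wide);    sign-extends the byte: wide == -128  */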

/* Multiplication Instructions */
#define _mulb_(src, dest)  	__asm__("mulb %1,%0" : "=r" (dest) : \
					"ri" ((char)src) , "0" (dest))
#define _mulw_(src, dest)  	__asm__("mulw %1,%0" : "=r" (dest) : \
					"ri" ((short)src) , "0" (dest))
#define _mulsb_(src, dest)	__asm__("mulsb %1,%0" : "=r" (dest) : \
					"r" ((char)src) , "0" (dest))
#define _mulsw_(src, dest)	__asm__("mulsw %1,%0" : "=r" (dest) : \
					"r" ((short)src) , "0" (dest))
#define _muluw_(src, dest)	__asm__("muluw %1,%0" : "=r" (dest) : \
					"r" ((unsigned short)src) , "0" (dest))
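
/* Example (illustrative sketch; the variable is hypothetical):

     short samples = 6;
     _mulw_(3, samples);    16-bit multiply, samples == 18  */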

/* nop Instruction */
#define _nop_()  		__asm__("nop")

/* or Instructions */
#define _orb_(src, dest)  	__asm__("orb %1,%0" : "=r" (dest) : \
					"ri" ((unsigned char)src) , "0" (dest))
#define _orw_(src, dest)  	__asm__("orw %1,%0" : "=r" (dest) : \
					"ri" ((unsigned short)src) , "0" (dest))
#define _ord_(src, dest)  	__asm__("ord %1,%0" : "=r" (dest)  : \
					"ri" ((unsigned int)src) , "0" (dest))

/* retx Instruction */
#define _retx_()  		__asm__("retx")

/* Set Bit Instructions */
#define _sbitb_(pos, dest)	__asm__("sbitb %1,%0" : "=mr" (dest) : \
					"i" ((unsigned char)pos) , "0" (dest) : "cc")
#define _sbitw_(pos, dest)	__asm__("sbitw %1,%0" : "=mr" (dest) : \
					"i" ((unsigned char)pos) , "0" (dest) : "cc")

/* spr and sprd Instructions */
#define _spr_(procreg, dest)	__asm__("spr\t" procreg ",%0" : \
				        "=r" (dest) : /* no input */ : "cc")
#define _sprd_(procregd, dest)	__asm__("sprd\t" procregd ",%0" : \
				        "=r" (dest) : /* no input */ : "cc")
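
/* Example (illustrative sketch): read the processor status register.
   The set_i_bit()/clear_i_bit() helpers at the end of this file use
   the same pattern.

     unsigned short psr_copy = 0;
     _spr_("psr", psr_copy);    copies PSR into psr_copy  */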

/* Store Instructions */
#define _storb_(src, address)  	__asm__("storb %1,%0" : "=m" (address) : \
					"ri" ((unsigned int)src))
#define _storw_(src, address)  	__asm__("storw %1,%0" : "=m" (address) : \
					"ri" ((unsigned int)src))
#define _stord_(src, address)  	__asm__("stord %1,%0" : "=m" (address) : \
					"ri" ((unsigned int)src))
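
/* Example (illustrative sketch; the destination object is hypothetical):

     extern volatile unsigned short tx_data;
     _storw_(0x1234, tx_data);    emits a storw of the value to tx_data  */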

/* Store Multiple Instructions */
#define _storm_(mask, src)  	__asm__("storm %1,%0" : /* No output here */ : \
					"i" (mask) , "r" ((unsigned int)src))
#define _stormp_(mask, src)  	__asm__("stormp %1,%0" : /* No output here */ : \
					"i" (mask) , "r" ((unsigned int)src))

/* Subtract Instructions */
#define _subb_(src, dest)	__asm__("subb %1, %0" : "=r" (dest) : \
					"ri" ((unsigned char)src), "0" (dest) : "cc")
#define _subw_(src, dest)	__asm__("subw %1, %0" : "=r" (dest) : \
					"ri" ((unsigned short)src), "0" (dest) : "cc")
#define _subd_(src, dest)	__asm__("subd %1, %0" : "=r" (dest) : \
					"ri" ((unsigned long)src), "0" (dest) : "cc")

/* Subtract with Carry Instructions */
#define _subcb_(src, dest)	__asm__("subcb %1, %0" : "=r" (dest) : \
					"ri" ((unsigned char)src), "0" (dest) : "cc")
#define _subcw_(src, dest)	__asm__("subcw %1, %0" : "=r" (dest) : \
					"ri" ((unsigned short)src), "0" (dest) : "cc")

/* Test Bit Instructions */
#define _tbit_(offset, base)	__asm__("tbit %0,%1" : /* no output */ : \
					"ri" ((unsigned char)offset) , "r" (base) : "cc")
#define _tbitb_(pos, dest)	__asm__("tbitb %0,%1" : /* No output */ : \
					"i" ((unsigned char)pos) , "m" (dest) : "cc")
#define _tbitw_(pos, dest)	__asm__("tbitw %0,%1" : /* No output */ : \
					"i" ((unsigned char)pos) , "m" (dest) : "cc")

/* wait Instruction */
#define _wait_()  		__asm__ volatile ("wait" :  :  : "cc")

/* xor Instructions */
#define _xorb_(src, dest)  	__asm__("xorb %1,%0" : "=r" (dest) : \
					"ri" ((unsigned char)src) , "0" (dest))
#define _xorw_(src, dest)  	__asm__("xorw %1,%0" : "=r" (dest) : \
					"ri" ((unsigned short)src) , "0" (dest))
#define _xord_(src, dest)  	__asm__("xord %1,%0" : "=r" (dest)  : \
					"ri" ((unsigned long)src) , "0" (dest))

/* mtgpr Instruction */
#define _mtgpr_(src, gpr) 				  \
__asm__ volatile ("movd\t%[_src], " gpr : /* no output */ \
		  : [_src] "ri" (src) 			  \
		  : gpr )

/* mfgpr Instruction */
#define _mfgpr_(gpr, dest) 					  \
__asm__ volatile ("movd\t" gpr ", %[_dest]" : [_dest] "=r" (dest) \
		  : /* no inputs */ )
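
/* Example (illustrative sketch; the register name is hypothetical and
   must be spelled the way the assembler expects a movd destination --
   substitute whatever register the surrounding code owns):

     unsigned int saved = 0;
     _mtgpr_(0x1234, "r12");    loads the immediate into r12 and marks
                                it clobbered
     _mfgpr_("r12", saved);     copies r12 back into saved  */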

/* set_i_bit(): set the I bit (global interrupt enable) in the PSR */
#define set_i_bit() 		\
  do 				\
  { 				\
    unsigned short tpsr; 	\
    _spr_("psr", tpsr); 	\
    tpsr |= 0x0800; 		\
    _lpr_("psr", tpsr); 	\
  } while(0)

/* Alias for set_i_bit */
#define _enable_global_interrupt_	set_i_bit

/* clear_i_bit(): clear the I bit (global interrupt enable) in the PSR */
#define clear_i_bit() 		\
  do 				\
  { 				\
    unsigned short tpsr; 	\
    _spr_("psr", tpsr); 	\
    tpsr &= 0xf7ff; 		\
    _lpr_("psr", tpsr); 	\
  } while(0)

/* Alias for clear_i_bit */
#define _disable_global_interrupt_	clear_i_bit

#define _save_asm_(x) 						\
  __asm__ volatile (x ::: "memory","cc", 			\
		    "r0","r1","r2","r3","r4","r5","r6","r7", 	\
		    "r8","r9","r10","r11","r12","r13")
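
/* Example (illustrative sketch; the instruction string is just a
   placeholder):

     _save_asm_("movw\t$0,r0");    the compiler assumes memory, the
                                   flags and r0-r13 are all clobbered  */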

#endif  /* _ASM */