/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V4.00
 * @date     28. August 2014
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned value right by a variable number of bits.

    \param [in]    value  Value to rotate
    \param [in]    shift  Number of bits to rotate
    \return               Rotated value
 */
#define __ROR                             __ror


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __breakpoint(value)


#if       (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit


/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                           __clrex
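
/* Usage sketch (illustrative only, not part of the CMSIS API): the exclusive
   access intrinsics are normally paired in a retry loop, e.g. to set a byte
   flag atomically. The function and variable names below are assumptions made
   for this example.

     uint32_t set_flag_atomic(volatile uint8_t *flag)
     {
       uint8_t old;
       do {
         old = __LDREXB(flag);               // exclusive read of the flag
       } while (__STREXB(1U, flag) != 0U);   // retry until the exclusive store succeeds
       return old;                           // previous value of the flag
     }
 */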


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz


/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit. The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif


/** \brief  LDRT Unprivileged (8 bit)

    This function executes an Unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an Unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an Unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))


/** \brief  STRT Unprivileged (8 bit)

    This function executes an Unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (16 bit)

    This function executes an Unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (32 bit)

    This function executes an Unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}
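
/* Usage sketch (illustrative only): a common pattern is to place __DSB() and
   __ISB() after a change to a core register that affects subsequent memory
   accesses or instruction fetches, e.g. after relocating the vector table.
   SCB->VTOR and the function name here are assumptions made for this example.

     void relocate_vector_table(uint32_t addr)
     {
       SCB->VTOR = addr;   // move the vector table
       __DSB();            // ensure the register write has completed
       __ISB();            // flush the pipeline so the change takes effect
     }
 */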


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
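
/* Usage sketch (illustrative only): the byte-reverse intrinsics are typically
   used for endianness conversion, e.g. between little-endian core data and
   big-endian (network) byte order. ntohl_u32 is an assumed helper name.

     static inline uint32_t ntohl_u32(uint32_t net)
     {
       return __REV(net);   // swap all four bytes of the word
     }

     // __REV16(0x11223344U) == 0x22114433U  (swaps bytes within each halfword)
     // __REVSH(0x000000FF)  == (int32_t)0xFFFFFF00 (swaps the low halfword, then sign-extends)
 */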


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned value right by a variable number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of bits to rotate
    \return             Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}
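
/* Usage sketch (illustrative only): __ROR rotates rather than shifts, so bits
   that fall off the right-hand end reappear at the top of the word.

     // __ROR(0x000000F1U, 4U) == 0x1000000FU
 */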


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)


#if       (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
   return(result);
}


/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern has to be used.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern has to be used.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
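
/* Usage sketch (illustrative only, not part of the CMSIS API): a lock-free
   counter increment built from the exclusive access pair. The function name
   is an assumption made for this example.

     static inline uint32_t atomic_increment(volatile uint32_t *cnt)
     {
       uint32_t val;
       do {
         val = __LDREXW(cnt) + 1U;          // exclusive read, compute new value
       } while (__STREXW(val, cnt) != 0U);  // 0 = store succeeded, 1 = retry
       return val;
     }
 */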


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
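
/* Usage sketch (illustrative only): saturating a 32-bit intermediate result
   into a narrower range, as is common in fixed-point signal processing.

     // __SSAT( 0x00020000, 16) ==  32767   (clamped to the signed 16-bit maximum)
     // __SSAT(-200000,     16) == -32768   (clamped to the signed 16-bit minimum)
     // __USAT(-42,          8) ==      0   (negative input clamps to zero)
     // __USAT(300,          8) ==    255   (clamped to the unsigned 8-bit maximum)
 */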


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
   return ((uint8_t) result);    /* Add explicit type cast here */
}
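
/* Usage sketch (illustrative only): __CLZ can be used to find the index of
   the most significant set bit, e.g. when rounding a size up to a power of
   two. highest_bit is an assumed helper name for this example.

     static inline uint32_t highest_bit(uint32_t value)
     {
       return 31U - __CLZ(value);   // note: __CLZ(0) == 32, so handle zero separately
     }

     // __CLZ(0x00000001U) == 31,  __CLZ(0x80000000U) == 0
 */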


/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit. The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  LDRT Unprivileged (8 bit)

    This function executes an Unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern has to be used.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an Unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern has to be used.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an Unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/** \brief  STRT Unprivileged (8 bit)

    This function executes an Unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (16 bit)

    This function executes an Unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (32 bit)

    This function executes an Unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */


#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */
/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */


#elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
/* Cosmic specific functions */
#include <cmsis_csm.h>

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */