/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V2.10
 * @date     19. July 2011
 *
 * @note
 * Copyright (C) 2009-2011 ARM Limited. All rights reserved.
 *
 * @par
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers.  This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * @par
 * THIS SOFTWARE IS PROVIDED "AS IS".  NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/
|
|
23
|
|
24 #ifndef __CORE_CMINSTR_H
|
|
25 #define __CORE_CMINSTR_H
|
|
26
|
|
27
|
|
28 /* ########################## Core Instruction Access ######################### */
|
|
29 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
|
|
30 Access to dedicated instructions
|
|
31 @{
|
|
32 */
|
|
33
|
|
#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions: the CMSIS names are mapped directly onto the
   compiler's built-in intrinsics, so each access compiles to one instruction. */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
/* 0xF = full-system barrier option (SY), the only architecturally required one. */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev

|
|
/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each of the two unsigned
    short halfwords of a 32-bit value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
/* armcc embedded assembler: per AAPCS the argument arrives in r0 and the
   result is returned in r0, so the function body is just REV16 + return. */
static __INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
|
|
119
|
|
120
|
|
/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign
    extension to integer (REVSH: swap the two low bytes, then sign-extend
    bits [15:0] into the full 32-bit result).

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
/* armcc embedded assembler: argument in r0, result in r0 (AAPCS). */
static __INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
|
|
133
|
|
134
|
|
#if (__CORTEX_M >= 0x03)
/* The following instructions exist only on Cortex-M3/M4 (ARMv7-M). */

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8 bit value.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
/* armcc's __ldrex infers the access size from the pointed-to type; the cast
   only narrows the intrinsic's result to the declared width. */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
/* Like __ldrex, armcc's __strex picks STREXB/H/W from the pointer's type. */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                           __clrex


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (1..32); must be a
                        compile-time constant (encoded in the instruction)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (0..31); must be a
                        compile-time constant (encoded in the instruction)
    \return             Saturated value
 */
#define __USAT                            __usat


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value
    (CLZ returns 32 for an input of 0).

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

#endif /* (__CORTEX_M >= 0x03) */
|
|
253
|
|
254
|
|
255
|
|
#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions: IAR ships its own CMSIS-compatible
   implementations, so this header only pulls them in. */

#include <cmsis_iar.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions: implemented as always-inline wrappers around
   GCC extended inline assembly. */
|
|
264
|
|
/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
/* volatile: keep the hint instruction even though it has no visible outputs. */
__attribute__( ( always_inline ) ) static __INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}
|
|
273
|
|
274
|
|
/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}
|
|
284
|
|
285
|
|
/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}
|
|
295
|
|
296
|
|
/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) static __INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
|
|
305
|
|
306
|
|
307 /** \brief Instruction Synchronization Barrier
|
|
308
|
|
309 Instruction Synchronization Barrier flushes the pipeline in the processor,
|
|
310 so that all instructions following the ISB are fetched from cache or
|
|
311 memory, after the instruction has been completed.
|
|
312 */
|
|
313 __attribute__( ( always_inline ) ) static __INLINE void __ISB(void)
|
|
314 {
|
|
315 __ASM volatile ("isb");
|
|
316 }
|
|
317
|
|
318
|
|
319 /** \brief Data Synchronization Barrier
|
|
320
|
|
321 This function acts as a special kind of Data Memory Barrier.
|
|
322 It completes when all explicit memory accesses before this instruction complete.
|
|
323 */
|
|
324 __attribute__( ( always_inline ) ) static __INLINE void __DSB(void)
|
|
325 {
|
|
326 __ASM volatile ("dsb");
|
|
327 }
|
|
328
|
|
329
|
|
330 /** \brief Data Memory Barrier
|
|
331
|
|
332 This function ensures the apparent order of the explicit memory operations before
|
|
333 and after the instruction, without ensuring their completion.
|
|
334 */
|
|
335 __attribute__( ( always_inline ) ) static __INLINE void __DMB(void)
|
|
336 {
|
|
337 __ASM volatile ("dmb");
|
|
338 }
|
|
339
|
|
340
|
|
341 /** \brief Reverse byte order (32 bit)
|
|
342
|
|
343 This function reverses the byte order in integer value.
|
|
344
|
|
345 \param [in] value Value to reverse
|
|
346 \return Reversed value
|
|
347 */
|
|
348 __attribute__( ( always_inline ) ) static __INLINE uint32_t __REV(uint32_t value)
|
|
349 {
|
|
350 uint32_t result;
|
|
351
|
|
352 __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
|
|
353 return(result);
|
|
354 }
|
|
355
|
|
356
|
|
/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each of the two unsigned
    short halfwords of a 32-bit value (REV16).

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
|
|
371
|
|
372
|
|
373 /** \brief Reverse byte order in signed short value
|
|
374
|
|
375 This function reverses the byte order in a signed short value with sign extension to integer.
|
|
376
|
|
377 \param [in] value Value to reverse
|
|
378 \return Reversed value
|
|
379 */
|
|
380 __attribute__( ( always_inline ) ) static __INLINE int32_t __REVSH(int32_t value)
|
|
381 {
|
|
382 uint32_t result;
|
|
383
|
|
384 __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
|
|
385 return(result);
|
|
386 }
|
|
387
|
|
388
|
|
389 #if (__CORTEX_M >= 0x03)
|
|
390
|
|
/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value (RBIT,
    available on ARMv7-M only — this block is guarded by __CORTEX_M >= 0x03).

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
|
|
405
|
|
406
|
|
/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8 bit value:
    it loads *addr and marks the address for exclusive access, to be paired
    with a subsequent __STREXB on the same location.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint8_t result;

   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
   return(result);
}
|
|
421
|
|
422
|
|
/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16 bit values:
    it loads *addr and marks the address for exclusive access, to be paired
    with a subsequent __STREXH on the same location.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint16_t result;

   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
   return(result);
}
|
|
437
|
|
438
|
|
/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32 bit values:
    it loads *addr and marks the address for exclusive access, to be paired
    with a subsequent __STREXW on the same location.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
   return(result);
}
|
|
453
|
|
454
|
|
/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8 bit values.
    The store only takes effect if the exclusive monitor still holds the
    access set up by a prior __LDREXB.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
   return(result);
}
|
|
471
|
|
472
|
|
/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16 bit values.
    The store only takes effect if the exclusive monitor still holds the
    access set up by a prior __LDREXH.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
   return(result);
}
|
|
489
|
|
490
|
|
/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32 bit values.
    The store only takes effect if the exclusive monitor still holds the
    access set up by a prior __LDREXW.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
   return(result);
}
|
|
507
|
|
508
|
|
/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) static __INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}
|
|
518
|
|
519
|
|
/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (1..32); must be a
                        compile-time constant — the "I" asm constraint
                        encodes it directly into the SSAT instruction
    \return             Saturated value
 */
/* Statement-expression macro (not a function) because the immediate operand
   cannot be passed through a function parameter; ARG1 is copied once, so it
   is evaluated exactly once. */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
|
|
534
|
|
535
|
|
/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (0..31); must be a
                        compile-time constant — the "I" asm constraint
                        encodes it directly into the USAT instruction
    \return             Saturated value
 */
/* Statement-expression macro for the same reason as __SSAT: the saturation
   position is an instruction immediate; ARG1 is evaluated exactly once. */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
|
|
550
|
|
551
|
|
/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.
    Note: the ARM CLZ instruction returns 32 for an input of 0, unlike
    GCC's __builtin_clz, which is undefined for 0 — hence the inline asm.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __CLZ(uint32_t value)
{
  uint8_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
|
|
566
|
|
567 #endif /* (__CORTEX_M >= 0x03) */
|
|
568
|
|
569
|
|
570
|
|
571
|
|
572 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
|
|
573 /* TASKING carm specific functions */
|
|
574
|
|
/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */
|
|
580
|
|
581 #endif
|
|
582
|
|
583 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
|
|
584
|
|
585 #endif /* __CORE_CMINSTR_H */
|