/**************************************************************************//**
 * @file     cmsis_armclang_ltm.h
 * @brief    CMSIS compiler armclang (Arm Compiler 6) header file
 * @version  V1.6.0
 * @date     20. January 2023
 ******************************************************************************/
/*
 * Copyright (c) 2018-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */

#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
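
/* Usage sketch (illustrative only, not part of the CMSIS API): the
   __UNALIGNED_*_READ/WRITE macros let code access a possibly misaligned
   location, e.g. a multi-byte field at an odd offset in a byte stream,
   without invoking undefined behaviour. The buffer name is hypothetical.

     uint8_t  frame[8];
     uint16_t len = __UNALIGNED_UINT16_READ(&frame[1]);  // read at odd offset
     __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);   // write at odd offset
 */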
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
  #define __NO_INIT                              __attribute__ ((section (".bss.noinit")))
#endif
#ifndef __ALIAS
  #define __ALIAS(x)                             __attribute__ ((alias(x)))
#endif

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif

#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
#ifndef __STACK_SEAL
#define __STACK_SEAL              Image$$STACKSEAL$$ZI$$Base
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE      8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
#endif


__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
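
/* Usage sketch (illustrative only): secure-world startup code may write the
   seal value at the top of the secure stack before first use, so that a
   mismatching exception return cannot unstack past it. The extern symbol
   declaration follows the CMSIS startup template convention but is an
   assumption of the project's scatter file.

     extern uint64_t __STACK_SEAL;                 // placed by the linker
     __TZ_set_STACKSEAL_S((uint32_t *)(&__STACK_SEAL));
 */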
#endif


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB()        __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __builtin_arm_dmb(0xF)


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of bits to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
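
/* Usage sketch (illustrative only): __ROR is used for bit-mixing and for
   building __REV16 above. Rotation counts are taken modulo 32:

     uint32_t x = __ROR(0x80000001U, 1U);   // x == 0xC0000000U
     uint32_t y = __ROR(0x12345678U, 32U);  // rotation by 32 is a no-op
 */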

/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT            __builtin_arm_rbit

/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
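
/* Usage sketch (illustrative only): __CLZ is the usual way to find the
   highest set bit of a word, e.g. for a software log2:

     uint32_t v   = 0x00010000U;
     uint8_t  msb = (uint8_t)(31U - __CLZ(v));   // msb == 16
     uint8_t  z   = __CLZ(0U);                   // defined here as 32
 */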

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex
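
/* Usage sketch (illustrative only): the canonical LDREX/STREX retry loop for
   a lock-free atomic increment. The store returns 0 on success and 1 if the
   exclusive monitor was lost, in which case the load is retried. The
   function and variable names are hypothetical.

     static void atomic_inc(volatile uint32_t *p)
     {
       uint32_t v;
       do {
         v = __LDREXW(p);              // open the exclusive monitor
       } while (__STREXW(v + 1U, p));  // retry if exclusivity was lost
     }
 */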

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
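
/* Usage sketch (illustrative only): clamping a wide intermediate result into
   a narrower range, e.g. a Q15 accumulator. The same calls work with the
   builtin variants above, where sat must be a compile-time constant:

     int32_t  acc = 200000;
     int16_t  s   = (int16_t)__SSAT(acc, 16U);   // s == 32767
     uint32_t u   = __USAT(-5, 8U);              // u == 0
 */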

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes an LDAB instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes an LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes an LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes an STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes an STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes an STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
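
/* Usage sketch (illustrative only): a minimal release/acquire handoff of a
   data word between two contexts, using the v8-M load-acquire/store-release
   instructions instead of explicit DMBs. The variable names are hypothetical.

     volatile uint32_t payload;
     volatile uint32_t ready;

     // producer
     payload = 42U;
     __STL(1U, &ready);               // store-release: payload visible first

     // consumer
     while (__LDA(&ready) == 0U) { }  // load-acquire pairs with the release
     uint32_t v = payload;
 */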

/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes an LDAB exclusive instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes an LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes an LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes an STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes an STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes an STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}
#endif


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
#endif


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure mode.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  __ISB();
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ISB();
}
#endif


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
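
/* Usage sketch (illustrative only): the common save/disable/restore pattern
   for a short critical section built on PRIMASK:

     uint32_t primask = __get_PRIMASK();
     __disable_irq();
     // ... touch data shared with interrupt handlers ...
     __set_PRIMASK(primask);     // restore the previous masking state
 */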

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
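
/* Usage sketch (illustrative only): because __set_BASEPRI_MAX only ever
   raises the masking level, it is safe for nested critical sections. The
   priority value assumes 4 implemented priority bits, which is device
   specific:

     uint32_t old = __get_BASEPRI();
     __set_BASEPRI_MAX(0x40U);   // ignored if BASEPRI already masks more
     // ... region protected against interrupts at priority 0x40 and lower ...
     __set_BASEPRI(old);
 */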

/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/**
  \brief   Get Process Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence zero is always returned in non-secure
  state.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence zero is always returned in non-secure
  state.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence the write is silently ignored in
  non-secure state.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
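
/* Usage sketch (illustrative only): an RTOS may program PSPLIM alongside PSP
   on a context switch, so that a task stack overflow raises a fault instead
   of silently corrupting memory. The task structure and field names are
   hypothetical.

     __set_PSPLIM((uint32_t)task->stack_base);                  // lowest legal SP
     __set_PSP((uint32_t)task->stack_base + task->stack_size);  // initial top
 */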

#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence the write is silently ignored in
  non-secure state.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence the write is silently ignored.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  Stack Pointer Limit register, hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()      ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)      ((void)(x))
#endif
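
/* Usage sketch (illustrative only): reading and clearing the cumulative
   floating-point exception flags in FPSCR. The 0x9F mask (IOC/DZC/OFC/UFC/
   IXC/IDC) is assumed from the ARMv7-M/ARMv8-M FPSCR layout; consult the
   device's reference manual before relying on it.

     uint32_t fpscr = __get_FPSCR();
     if ((fpscr & 0x0000009FU) != 0U) {    // any sticky exception flag set?
       __set_FPSCR(fpscr & ~0x0000009FU);  // clear the cumulative flags
     }
 */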

/*@} end of CMSIS_Core_RegAccFunctions */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
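
/* Usage sketch (illustrative only): the packed 8-bit intrinsics operate on
   four byte lanes at once, e.g. a per-byte halving average of two packed
   pixel values with no carries between lanes:

     uint32_t a   = 0x10203040U;
     uint32_t b   = 0x30405060U;
     uint32_t avg = __UHADD8(a, b);   // avg == 0x20304050U
 */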
1451 
1452 
__SSUB8(uint32_t op1,uint32_t op2)1453 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1454 {
1455   uint32_t result;
1456 
1457   __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1458   return(result);
1459 }
1460 
__QSUB8(uint32_t op1,uint32_t op2)1461 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1462 {
1463   uint32_t result;
1464 
1465   __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1466   return(result);
1467 }
1468 
__SHSUB8(uint32_t op1,uint32_t op2)1469 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1470 {
1471   uint32_t result;
1472 
1473   __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1474   return(result);
1475 }
1476 
__USUB8(uint32_t op1,uint32_t op2)1477 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1478 {
1479   uint32_t result;
1480 
1481   __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1482   return(result);
1483 }
1484 
__UQSUB8(uint32_t op1,uint32_t op2)1485 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1486 {
1487   uint32_t result;
1488 
1489   __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1490   return(result);
1491 }
1492 
__UHSUB8(uint32_t op1,uint32_t op2)1493 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1494 {
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
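
/* Usage sketch (illustrative only, not part of the CMSIS API): __USAD8 forms
   the sum of absolute differences of the four byte lanes, and __USADA8 adds
   that sum to a running accumulator, e.g. for a block-matching metric over
   word-packed byte buffers. 'a', 'b' (const uint32_t *) and 'len' (number of
   packed words) are hypothetical names for this sketch.

     uint32_t sad = 0U;
     for (uint32_t i = 0U; i < len; i++)
     {
       sad = __USADA8(a[i], b[i], sad);
     }
*/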

#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
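
/* Note: __SSAT16/__USAT16 are macros rather than inline functions because the
   saturation bit position is encoded in the instruction itself; the "I"
   constraint requires ARG2 to be a compile-time constant (1..16 for ssat16,
   0..15 for usat16). Illustrative use, with a hypothetical value:

     int32_t packed = (int32_t)0x7FFF8000;   // two signed halfwords
     int32_t sat    = __SSAT16(packed, 12);  // saturate both lanes to 12 bits
*/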

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
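
/* Usage sketch (illustrative only): a dot product of two q15 vectors packed
   two lanes per word, the classic use of smuad/smlad. 'x', 'y'
   (const uint32_t *) and 'n' (number of packed words) are hypothetical.

     uint32_t acc = 0U;
     for (uint32_t i = 0U; i < n; i++)
     {
       acc = __SMLAD(x[i], y[i], acc);   // acc += x.lo*y.lo + x.hi*y.hi
     }
*/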

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
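
/* The llreg_u union used by the 64-bit accumulate intrinsics maps the
   uint64_t accumulator onto the pair of 32-bit registers the instruction
   operates on; the matching "0"/"1" input constraints tie the accumulator
   inputs to the output registers, and the __ARMEB__ branches swap the halves
   so the low/high pairing is also correct on big-endian targets. */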

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
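
/* __SEL selects each result byte from op1 or op2 according to the APSR.GE
   flags, which are set per lane by a preceding parallel add/subtract.
   Illustrative byte-wise maximum (variable names hypothetical; __USUB8 is
   defined earlier in this file and sets GE[n] where the op1 byte >= op2 byte):

     (void)__USUB8(a, b);          // sets GE flags per byte
     uint32_t max4 = __SEL(a, b);  // byte-wise max(a, b)
*/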

__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
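
/* __PKHBT/__PKHTB mirror the PKHBT/PKHTB pack-halfword instructions in plain
   C: __PKHBT keeps the bottom halfword of ARG1 and inserts ARG2 shifted left
   by ARG3 into the top; __PKHTB keeps the top halfword of ARG1 and inserts
   ARG2 shifted right by ARG3 into the bottom. Example (values hypothetical):

     uint32_t packed = __PKHBT(0x00001111UL, 0x00002222UL, 16);  // 0x22221111
*/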

#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
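
/* These combine a rotation with the byte-pair sign extension: for instance,
   __SXTB16_RORn(x, 8) rotates x right by 8 so that bytes 1 and 3 land in the
   lanes sxtb16 extends, reaching the byte pair __SXTB16 alone does not. */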

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCLANG_H */