xref: /btstack/port/stm32-l476rg-nucleo-sx1280/Drivers/CMSIS/Include/cmsis_armclang.h (revision 6b8177c56d8d42c688f52897394f8b5eac7ee972)
1*6b8177c5SMatthias Ringwald /**************************************************************************//**
2*6b8177c5SMatthias Ringwald  * @file     cmsis_armclang.h
3*6b8177c5SMatthias Ringwald  * @brief    CMSIS compiler armclang (Arm Compiler 6) header file
4*6b8177c5SMatthias Ringwald  * @version  V5.2.0
5*6b8177c5SMatthias Ringwald  * @date     08. May 2019
6*6b8177c5SMatthias Ringwald  ******************************************************************************/
7*6b8177c5SMatthias Ringwald /*
8*6b8177c5SMatthias Ringwald  * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
9*6b8177c5SMatthias Ringwald  *
10*6b8177c5SMatthias Ringwald  * SPDX-License-Identifier: Apache-2.0
11*6b8177c5SMatthias Ringwald  *
12*6b8177c5SMatthias Ringwald  * Licensed under the Apache License, Version 2.0 (the License); you may
13*6b8177c5SMatthias Ringwald  * not use this file except in compliance with the License.
14*6b8177c5SMatthias Ringwald  * You may obtain a copy of the License at
15*6b8177c5SMatthias Ringwald  *
16*6b8177c5SMatthias Ringwald  * www.apache.org/licenses/LICENSE-2.0
17*6b8177c5SMatthias Ringwald  *
18*6b8177c5SMatthias Ringwald  * Unless required by applicable law or agreed to in writing, software
19*6b8177c5SMatthias Ringwald  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20*6b8177c5SMatthias Ringwald  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21*6b8177c5SMatthias Ringwald  * See the License for the specific language governing permissions and
22*6b8177c5SMatthias Ringwald  * limitations under the License.
23*6b8177c5SMatthias Ringwald  */
24*6b8177c5SMatthias Ringwald 
25*6b8177c5SMatthias Ringwald /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26*6b8177c5SMatthias Ringwald 
27*6b8177c5SMatthias Ringwald #ifndef __CMSIS_ARMCLANG_H
28*6b8177c5SMatthias Ringwald #define __CMSIS_ARMCLANG_H
29*6b8177c5SMatthias Ringwald 
30*6b8177c5SMatthias Ringwald #pragma clang system_header   /* treat file as system include file */
31*6b8177c5SMatthias Ringwald 
32*6b8177c5SMatthias Ringwald #ifndef __ARM_COMPAT_H
33*6b8177c5SMatthias Ringwald #include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
34*6b8177c5SMatthias Ringwald #endif
35*6b8177c5SMatthias Ringwald 
/* CMSIS compiler specific defines */
/* Each macro is only defined if the user/toolchain has not already provided it. */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
/* aligned(1) forces byte alignment so packed objects may sit at any address */
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
/* The T_UINT* wrapper structs below let the compiler generate correct
   unaligned load/store sequences instead of plain (mis)aligned accesses. */
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
/* Empty asm with a "memory" clobber: stops the compiler reordering or caching
   memory accesses across this point; emits no instructions. */
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
116*6b8177c5SMatthias Ringwald 
/* #########################  Startup and Lowlevel Init  ######################## */
/* Default symbols used by CMSIS startup code; each can be overridden before
   including this header. The Image$$...$$ names are armlink linker symbols. */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

/* Fixed: use the canonical __attribute__ spelling for consistency with the
   rest of this file (was the non-standard alias __attribute). */
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif
139*6b8177c5SMatthias Ringwald /* ###########################  Core Function Access  ########################### */
140*6b8177c5SMatthias Ringwald /** \ingroup  CMSIS_Core_FunctionInterface
141*6b8177c5SMatthias Ringwald     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
142*6b8177c5SMatthias Ringwald   @{
143*6b8177c5SMatthias Ringwald  */
144*6b8177c5SMatthias Ringwald 
145*6b8177c5SMatthias Ringwald /**
146*6b8177c5SMatthias Ringwald   \brief   Enable IRQ Interrupts
147*6b8177c5SMatthias Ringwald   \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
148*6b8177c5SMatthias Ringwald            Can only be executed in Privileged modes.
149*6b8177c5SMatthias Ringwald  */
150*6b8177c5SMatthias Ringwald /* intrinsic void __enable_irq();  see arm_compat.h */
151*6b8177c5SMatthias Ringwald 
152*6b8177c5SMatthias Ringwald 
153*6b8177c5SMatthias Ringwald /**
154*6b8177c5SMatthias Ringwald   \brief   Disable IRQ Interrupts
155*6b8177c5SMatthias Ringwald   \details Disables IRQ interrupts by setting the I-bit in the CPSR.
156*6b8177c5SMatthias Ringwald            Can only be executed in Privileged modes.
157*6b8177c5SMatthias Ringwald  */
158*6b8177c5SMatthias Ringwald /* intrinsic void __disable_irq();  see arm_compat.h */
159*6b8177c5SMatthias Ringwald 
160*6b8177c5SMatthias Ringwald 
161*6b8177c5SMatthias Ringwald /**
162*6b8177c5SMatthias Ringwald   \brief   Get Control Register
163*6b8177c5SMatthias Ringwald   \details Returns the content of the Control Register.
164*6b8177c5SMatthias Ringwald   \return               Control Register value
165*6b8177c5SMatthias Ringwald  */
__get_CONTROL(void)166*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
167*6b8177c5SMatthias Ringwald {
168*6b8177c5SMatthias Ringwald   uint32_t result;
169*6b8177c5SMatthias Ringwald 
170*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, control" : "=r" (result) );
171*6b8177c5SMatthias Ringwald   return(result);
172*6b8177c5SMatthias Ringwald }
173*6b8177c5SMatthias Ringwald 
174*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Reads the non-secure CONTROL register (control_ns) while executing in secure state.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, control_ns" : "=r" (value));
  return value;
}
#endif
189*6b8177c5SMatthias Ringwald 
190*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  /* "memory" clobber: a CONTROL write can change the active stack pointer /
     privilege level, so memory accesses must not be reordered across it */
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
200*6b8177c5SMatthias Ringwald 
201*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  /* "memory" clobber prevents reordering of accesses around the mode change */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
213*6b8177c5SMatthias Ringwald 
214*6b8177c5SMatthias Ringwald 
215*6b8177c5SMatthias Ringwald /**
216*6b8177c5SMatthias Ringwald   \brief   Get IPSR Register
217*6b8177c5SMatthias Ringwald   \details Returns the content of the IPSR Register.
218*6b8177c5SMatthias Ringwald   \return               IPSR Register value
219*6b8177c5SMatthias Ringwald  */
__get_IPSR(void)220*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
221*6b8177c5SMatthias Ringwald {
222*6b8177c5SMatthias Ringwald   uint32_t result;
223*6b8177c5SMatthias Ringwald 
224*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
225*6b8177c5SMatthias Ringwald   return(result);
226*6b8177c5SMatthias Ringwald }
227*6b8177c5SMatthias Ringwald 
228*6b8177c5SMatthias Ringwald 
229*6b8177c5SMatthias Ringwald /**
230*6b8177c5SMatthias Ringwald   \brief   Get APSR Register
231*6b8177c5SMatthias Ringwald   \details Returns the content of the APSR Register.
232*6b8177c5SMatthias Ringwald   \return               APSR Register value
233*6b8177c5SMatthias Ringwald  */
__get_APSR(void)234*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_APSR(void)
235*6b8177c5SMatthias Ringwald {
236*6b8177c5SMatthias Ringwald   uint32_t result;
237*6b8177c5SMatthias Ringwald 
238*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
239*6b8177c5SMatthias Ringwald   return(result);
240*6b8177c5SMatthias Ringwald }
241*6b8177c5SMatthias Ringwald 
242*6b8177c5SMatthias Ringwald 
243*6b8177c5SMatthias Ringwald /**
244*6b8177c5SMatthias Ringwald   \brief   Get xPSR Register
245*6b8177c5SMatthias Ringwald   \details Returns the content of the xPSR Register.
246*6b8177c5SMatthias Ringwald   \return               xPSR Register value
247*6b8177c5SMatthias Ringwald  */
__get_xPSR(void)248*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
249*6b8177c5SMatthias Ringwald {
250*6b8177c5SMatthias Ringwald   uint32_t result;
251*6b8177c5SMatthias Ringwald 
252*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
253*6b8177c5SMatthias Ringwald   return(result);
254*6b8177c5SMatthias Ringwald }
255*6b8177c5SMatthias Ringwald 
256*6b8177c5SMatthias Ringwald 
257*6b8177c5SMatthias Ringwald /**
258*6b8177c5SMatthias Ringwald   \brief   Get Process Stack Pointer
259*6b8177c5SMatthias Ringwald   \details Returns the current value of the Process Stack Pointer (PSP).
260*6b8177c5SMatthias Ringwald   \return               PSP Register value
261*6b8177c5SMatthias Ringwald  */
__get_PSP(void)262*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSP(void)
263*6b8177c5SMatthias Ringwald {
264*6b8177c5SMatthias Ringwald   uint32_t result;
265*6b8177c5SMatthias Ringwald 
266*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
267*6b8177c5SMatthias Ringwald   return(result);
268*6b8177c5SMatthias Ringwald }
269*6b8177c5SMatthias Ringwald 
270*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Reads the non-secure Process Stack Pointer (psp_ns) while in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, psp_ns" : "=r" (value));
  return value;
}
#endif
285*6b8177c5SMatthias Ringwald 
286*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  /* NOTE(review): no "memory" clobber here, matching the CONTROL/PRIMASK
     setters' contrast in this file — confirm against upstream CMSIS intent */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
296*6b8177c5SMatthias Ringwald 
297*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
309*6b8177c5SMatthias Ringwald 
310*6b8177c5SMatthias Ringwald 
311*6b8177c5SMatthias Ringwald /**
312*6b8177c5SMatthias Ringwald   \brief   Get Main Stack Pointer
313*6b8177c5SMatthias Ringwald   \details Returns the current value of the Main Stack Pointer (MSP).
314*6b8177c5SMatthias Ringwald   \return               MSP Register value
315*6b8177c5SMatthias Ringwald  */
__get_MSP(void)316*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSP(void)
317*6b8177c5SMatthias Ringwald {
318*6b8177c5SMatthias Ringwald   uint32_t result;
319*6b8177c5SMatthias Ringwald 
320*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, msp" : "=r" (result) );
321*6b8177c5SMatthias Ringwald   return(result);
322*6b8177c5SMatthias Ringwald }
323*6b8177c5SMatthias Ringwald 
324*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Reads the non-secure Main Stack Pointer (msp_ns) while in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, msp_ns" : "=r" (value));
  return value;
}
#endif
339*6b8177c5SMatthias Ringwald 
340*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
350*6b8177c5SMatthias Ringwald 
351*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
363*6b8177c5SMatthias Ringwald 
364*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Reads the non-secure Stack Pointer (sp_ns) while in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, sp_ns" : "=r" (value));
  return value;
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Writes the given value to the non-secure Stack Pointer (sp_ns) while in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
390*6b8177c5SMatthias Ringwald 
391*6b8177c5SMatthias Ringwald 
392*6b8177c5SMatthias Ringwald /**
393*6b8177c5SMatthias Ringwald   \brief   Get Priority Mask
394*6b8177c5SMatthias Ringwald   \details Returns the current state of the priority mask bit from the Priority Mask Register.
395*6b8177c5SMatthias Ringwald   \return               Priority Mask value
396*6b8177c5SMatthias Ringwald  */
__get_PRIMASK(void)397*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
398*6b8177c5SMatthias Ringwald {
399*6b8177c5SMatthias Ringwald   uint32_t result;
400*6b8177c5SMatthias Ringwald 
401*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, primask" : "=r" (result) );
402*6b8177c5SMatthias Ringwald   return(result);
403*6b8177c5SMatthias Ringwald }
404*6b8177c5SMatthias Ringwald 
405*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Reads the non-secure PRIMASK register (primask_ns) while in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, primask_ns" : "=r" (value));
  return value;
}
#endif
420*6b8177c5SMatthias Ringwald 
421*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  /* "memory" clobber: PRIMASK gates interrupts, so this acts as a
     compiler barrier for code relying on the masked section */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
431*6b8177c5SMatthias Ringwald 
432*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  /* "memory" clobber keeps accesses ordered around the mask change */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
444*6b8177c5SMatthias Ringwald 
445*6b8177c5SMatthias Ringwald 
/* The following block is only available on Armv7-M / Armv7E-M / Armv8-M
   Mainline; Baseline (v6-M/v8-M Baseline) has no FAULTMASK/BASEPRI. */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq   /* see arm_compat.h */


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq   /* see arm_compat.h */
463*6b8177c5SMatthias Ringwald 
464*6b8177c5SMatthias Ringwald 
465*6b8177c5SMatthias Ringwald /**
466*6b8177c5SMatthias Ringwald   \brief   Get Base Priority
467*6b8177c5SMatthias Ringwald   \details Returns the current value of the Base Priority register.
468*6b8177c5SMatthias Ringwald   \return               Base Priority register value
469*6b8177c5SMatthias Ringwald  */
__get_BASEPRI(void)470*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
471*6b8177c5SMatthias Ringwald {
472*6b8177c5SMatthias Ringwald   uint32_t result;
473*6b8177c5SMatthias Ringwald 
474*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
475*6b8177c5SMatthias Ringwald   return(result);
476*6b8177c5SMatthias Ringwald }
477*6b8177c5SMatthias Ringwald 
478*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Reads the non-secure BASEPRI register (basepri_ns) while in secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t value;
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (value));
  return value;
}
#endif
493*6b8177c5SMatthias Ringwald 
494*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  /* "memory" clobber: BASEPRI masks interrupts by priority, so accesses
     must not be moved across the write */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
504*6b8177c5SMatthias Ringwald 
505*6b8177c5SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
517*6b8177c5SMatthias Ringwald 
518*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  /* BASEPRI_MAX is a conditional-write alias of BASEPRI: the hardware accepts
     the write only when it raises the masking level. */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
529*6b8177c5SMatthias Ringwald 
530*6b8177c5SMatthias Ringwald 
531*6b8177c5SMatthias Ringwald /**
532*6b8177c5SMatthias Ringwald   \brief   Get Fault Mask
533*6b8177c5SMatthias Ringwald   \details Returns the current value of the Fault Mask register.
534*6b8177c5SMatthias Ringwald   \return               Fault Mask register value
535*6b8177c5SMatthias Ringwald  */
__get_FAULTMASK(void)536*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
537*6b8177c5SMatthias Ringwald {
538*6b8177c5SMatthias Ringwald   uint32_t result;
539*6b8177c5SMatthias Ringwald 
540*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
541*6b8177c5SMatthias Ringwald   return(result);
542*6b8177c5SMatthias Ringwald }
543*6b8177c5SMatthias Ringwald 
544*6b8177c5SMatthias Ringwald 
545*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
546*6b8177c5SMatthias Ringwald /**
547*6b8177c5SMatthias Ringwald   \brief   Get Fault Mask (non-secure)
548*6b8177c5SMatthias Ringwald   \details Returns the current value of the non-secure Fault Mask register when in secure state.
549*6b8177c5SMatthias Ringwald   \return               Fault Mask register value
550*6b8177c5SMatthias Ringwald  */
__TZ_get_FAULTMASK_NS(void)551*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
552*6b8177c5SMatthias Ringwald {
553*6b8177c5SMatthias Ringwald   uint32_t result;
554*6b8177c5SMatthias Ringwald 
555*6b8177c5SMatthias Ringwald   __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
556*6b8177c5SMatthias Ringwald   return(result);
557*6b8177c5SMatthias Ringwald }
558*6b8177c5SMatthias Ringwald #endif
559*6b8177c5SMatthias Ringwald 
560*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* "memory" clobber: compiler barrier around the exception-mask change. */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
570*6b8177c5SMatthias Ringwald 
571*6b8177c5SMatthias Ringwald 
572*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* Writes the banked non-secure FAULTMASK; "memory" clobber is a compiler barrier. */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
582*6b8177c5SMatthias Ringwald #endif
583*6b8177c5SMatthias Ringwald 
584*6b8177c5SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
585*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
586*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
587*6b8177c5SMatthias Ringwald 
588*6b8177c5SMatthias Ringwald 
589*6b8177c5SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
590*6b8177c5SMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
591*6b8177c5SMatthias Ringwald 
/**
  \brief   Get Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI (reads-as-zero / writes-ignored)
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}
613*6b8177c5SMatthias Ringwald 
614*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI (reads-as-zero / writes-ignored)
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
635*6b8177c5SMatthias Ringwald #endif
636*6b8177c5SMatthias Ringwald 
637*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI; discard the argument
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
657*6b8177c5SMatthias Ringwald 
658*6b8177c5SMatthias Ringwald 
659*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
660*6b8177c5SMatthias Ringwald /**
661*6b8177c5SMatthias Ringwald   \brief   Set Process Stack Pointer (non-secure)
662*6b8177c5SMatthias Ringwald   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
663*6b8177c5SMatthias Ringwald   Stack Pointer Limit register hence the write is silently ignored in non-secure
664*6b8177c5SMatthias Ringwald   mode.
665*6b8177c5SMatthias Ringwald 
666*6b8177c5SMatthias Ringwald   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
667*6b8177c5SMatthias Ringwald   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
668*6b8177c5SMatthias Ringwald  */
__TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)669*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
670*6b8177c5SMatthias Ringwald {
671*6b8177c5SMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
672*6b8177c5SMatthias Ringwald   // without main extensions, the non-secure PSPLIM is RAZ/WI
673*6b8177c5SMatthias Ringwald   (void)ProcStackPtrLimit;
674*6b8177c5SMatthias Ringwald #else
675*6b8177c5SMatthias Ringwald   __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
676*6b8177c5SMatthias Ringwald #endif
677*6b8177c5SMatthias Ringwald }
678*6b8177c5SMatthias Ringwald #endif
679*6b8177c5SMatthias Ringwald 
680*6b8177c5SMatthias Ringwald 
/**
  \brief   Get Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (reads-as-zero / writes-ignored)
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
701*6b8177c5SMatthias Ringwald 
702*6b8177c5SMatthias Ringwald 
703*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (reads-as-zero / writes-ignored)
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
723*6b8177c5SMatthias Ringwald #endif
724*6b8177c5SMatthias Ringwald 
725*6b8177c5SMatthias Ringwald 
/**
  \brief   Set Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI; discard the argument
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
744*6b8177c5SMatthias Ringwald 
745*6b8177c5SMatthias Ringwald 
746*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI; discard the argument
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
764*6b8177c5SMatthias Ringwald #endif
765*6b8177c5SMatthias Ringwald 
766*6b8177c5SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
767*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
768*6b8177c5SMatthias Ringwald 
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
/* Maps directly onto the compiler builtin; the cast forces a uint32_t result. */
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
/* No FPU present/used: FPSCR reads as zero. */
#define __get_FPSCR()      ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
/* No FPU present/used: the write is discarded (argument is still evaluated). */
#define __set_FPSCR(x)      ((void)(x))
#endif
792*6b8177c5SMatthias Ringwald 
793*6b8177c5SMatthias Ringwald 
794*6b8177c5SMatthias Ringwald /*@} end of CMSIS_Core_RegAccFunctions */
795*6b8177c5SMatthias Ringwald 
796*6b8177c5SMatthias Ringwald 
797*6b8177c5SMatthias Ringwald /* ##########################  Core Instruction Access  ######################### */
798*6b8177c5SMatthias Ringwald /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
799*6b8177c5SMatthias Ringwald   Access to dedicated instructions
800*6b8177c5SMatthias Ringwald   @{
801*6b8177c5SMatthias Ringwald */
802*6b8177c5SMatthias Ringwald 
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
/* These expand to GCC-style inline-assembly operand expressions:
 * output ("="), read-write ("+"), and plain input constraints. */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
815*6b8177c5SMatthias Ringwald 
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
/* 0xF selects the full-system (SY) barrier option for ISB/DSB/DMB below. */
#define __ISB()        __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __builtin_arm_dmb(0xF)


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* Implemented as a full 32-bit byte reverse followed by a 16-bit rotate. */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
893*6b8177c5SMatthias Ringwald 
894*6b8177c5SMatthias Ringwald 
895*6b8177c5SMatthias Ringwald /**
896*6b8177c5SMatthias Ringwald   \brief   Rotate Right in unsigned value (32 bit)
897*6b8177c5SMatthias Ringwald   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
898*6b8177c5SMatthias Ringwald   \param [in]    op1  Value to rotate
899*6b8177c5SMatthias Ringwald   \param [in]    op2  Number of Bits to rotate
900*6b8177c5SMatthias Ringwald   \return               Rotated value
901*6b8177c5SMatthias Ringwald  */
__ROR(uint32_t op1,uint32_t op2)902*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
903*6b8177c5SMatthias Ringwald {
904*6b8177c5SMatthias Ringwald   op2 %= 32U;
905*6b8177c5SMatthias Ringwald   if (op2 == 0U)
906*6b8177c5SMatthias Ringwald   {
907*6b8177c5SMatthias Ringwald     return op1;
908*6b8177c5SMatthias Ringwald   }
909*6b8177c5SMatthias Ringwald   return (op1 >> op2) | (op1 << (32U - op2));
910*6b8177c5SMatthias Ringwald }
911*6b8177c5SMatthias Ringwald 
912*6b8177c5SMatthias Ringwald 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* #value stringizes the argument into the asm template ("bkpt <value>"),
   so value must be an integer literal/constant token, not a variable. */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT            __builtin_arm_rbit
930*6b8177c5SMatthias Ringwald 
931*6b8177c5SMatthias Ringwald /**
932*6b8177c5SMatthias Ringwald   \brief   Count leading zeros
933*6b8177c5SMatthias Ringwald   \details Counts the number of leading zeros of a data value.
934*6b8177c5SMatthias Ringwald   \param [in]  value  Value to count the leading zeros
935*6b8177c5SMatthias Ringwald   \return             number of leading zeros in value
936*6b8177c5SMatthias Ringwald  */
__CLZ(uint32_t value)937*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
938*6b8177c5SMatthias Ringwald {
939*6b8177c5SMatthias Ringwald   /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
940*6b8177c5SMatthias Ringwald      __builtin_clz(0) is undefined behaviour, so handle this case specially.
941*6b8177c5SMatthias Ringwald      This guarantees ARM-compatible results if happening to compile on a non-ARM
942*6b8177c5SMatthias Ringwald      target, and ensures the compiler doesn't decide to activate any
943*6b8177c5SMatthias Ringwald      optimisations using the logic "value was passed to __builtin_clz, so it
944*6b8177c5SMatthias Ringwald      is non-zero".
945*6b8177c5SMatthias Ringwald      ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
946*6b8177c5SMatthias Ringwald      single CLZ instruction.
947*6b8177c5SMatthias Ringwald    */
948*6b8177c5SMatthias Ringwald   if (value == 0U)
949*6b8177c5SMatthias Ringwald   {
950*6b8177c5SMatthias Ringwald     return 32U;
951*6b8177c5SMatthias Ringwald   }
952*6b8177c5SMatthias Ringwald   return __builtin_clz(value);
953*6b8177c5SMatthias Ringwald }
954*6b8177c5SMatthias Ringwald 
955*6b8177c5SMatthias Ringwald 
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
/* Object-like macro: __LDREXB(ptr) expands to (uint8_t)__builtin_arm_ldrex(ptr).
   The builtin infers the access width from the pointee type; the cast fixes
   the result type.  Same pattern for the other LDREX/STREX macros below. */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1030*6b8177c5SMatthias Ringwald 
1031*6b8177c5SMatthias Ringwald 
1032*6b8177c5SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1033*6b8177c5SMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1034*6b8177c5SMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
1035*6b8177c5SMatthias Ringwald 
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* On Armv7-M/Armv8-M Mainline the compiler emits the SSAT/USAT instructions
   directly via these builtins (sat must be a compile-time constant). */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
1054*6b8177c5SMatthias Ringwald 
1055*6b8177c5SMatthias Ringwald 
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  /* RRX shifts right by one; the current APSR.C flag is inserted at bit 31. */
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
1070*6b8177c5SMatthias Ringwald 
1071*6b8177c5SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRBT instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  /* "Q" constraint: memory operand addressable with a single base register,
     as required by the LDRBT encoding. */
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* narrow the 32-bit register result to the declared return type */
}
1085*6b8177c5SMatthias Ringwald 
1086*6b8177c5SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRHT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* narrow the 32-bit register result to the declared return type */
}
1100*6b8177c5SMatthias Ringwald 
1101*6b8177c5SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1115*6b8177c5SMatthias Ringwald 
1116*6b8177c5SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to uint32_t to match the register source operand. */
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1127*6b8177c5SMatthias Ringwald 
1128*6b8177c5SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to uint32_t to match the register source operand. */
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1139*6b8177c5SMatthias Ringwald 
1140*6b8177c5SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
1151*6b8177c5SMatthias Ringwald 
1152*6b8177c5SMatthias Ringwald #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1153*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1154*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1155*6b8177c5SMatthias Ringwald 
1156*6b8177c5SMatthias Ringwald /**
1157*6b8177c5SMatthias Ringwald   \brief   Signed Saturate
1158*6b8177c5SMatthias Ringwald   \details Saturates a signed value.
1159*6b8177c5SMatthias Ringwald   \param [in]  value  Value to be saturated
1160*6b8177c5SMatthias Ringwald   \param [in]    sat  Bit position to saturate to (1..32)
1161*6b8177c5SMatthias Ringwald   \return             Saturated value
1162*6b8177c5SMatthias Ringwald  */
__SSAT(int32_t val,uint32_t sat)1163*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1164*6b8177c5SMatthias Ringwald {
1165*6b8177c5SMatthias Ringwald   if ((sat >= 1U) && (sat <= 32U))
1166*6b8177c5SMatthias Ringwald   {
1167*6b8177c5SMatthias Ringwald     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1168*6b8177c5SMatthias Ringwald     const int32_t min = -1 - max ;
1169*6b8177c5SMatthias Ringwald     if (val > max)
1170*6b8177c5SMatthias Ringwald     {
1171*6b8177c5SMatthias Ringwald       return max;
1172*6b8177c5SMatthias Ringwald     }
1173*6b8177c5SMatthias Ringwald     else if (val < min)
1174*6b8177c5SMatthias Ringwald     {
1175*6b8177c5SMatthias Ringwald       return min;
1176*6b8177c5SMatthias Ringwald     }
1177*6b8177c5SMatthias Ringwald   }
1178*6b8177c5SMatthias Ringwald   return val;
1179*6b8177c5SMatthias Ringwald }
1180*6b8177c5SMatthias Ringwald 
1181*6b8177c5SMatthias Ringwald /**
1182*6b8177c5SMatthias Ringwald   \brief   Unsigned Saturate
1183*6b8177c5SMatthias Ringwald   \details Saturates an unsigned value.
1184*6b8177c5SMatthias Ringwald   \param [in]  value  Value to be saturated
1185*6b8177c5SMatthias Ringwald   \param [in]    sat  Bit position to saturate to (0..31)
1186*6b8177c5SMatthias Ringwald   \return             Saturated value
1187*6b8177c5SMatthias Ringwald  */
__USAT(int32_t val,uint32_t sat)1188*6b8177c5SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1189*6b8177c5SMatthias Ringwald {
1190*6b8177c5SMatthias Ringwald   if (sat <= 31U)
1191*6b8177c5SMatthias Ringwald   {
1192*6b8177c5SMatthias Ringwald     const uint32_t max = ((1U << sat) - 1U);
1193*6b8177c5SMatthias Ringwald     if (val > (int32_t)max)
1194*6b8177c5SMatthias Ringwald     {
1195*6b8177c5SMatthias Ringwald       return max;
1196*6b8177c5SMatthias Ringwald     }
1197*6b8177c5SMatthias Ringwald     else if (val < 0)
1198*6b8177c5SMatthias Ringwald     {
1199*6b8177c5SMatthias Ringwald       return 0U;
1200*6b8177c5SMatthias Ringwald     }
1201*6b8177c5SMatthias Ringwald   }
1202*6b8177c5SMatthias Ringwald   return (uint32_t)val;
1203*6b8177c5SMatthias Ringwald }
1204*6b8177c5SMatthias Ringwald 
1205*6b8177c5SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1206*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1207*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1208*6b8177c5SMatthias Ringwald 
1209*6b8177c5SMatthias Ringwald 
1210*6b8177c5SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1211*6b8177c5SMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* narrow the 32-bit register result to the declared return type */
}
1225*6b8177c5SMatthias Ringwald 
1226*6b8177c5SMatthias Ringwald 
/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* narrow the 32-bit register result to the declared return type */
}
1240*6b8177c5SMatthias Ringwald 
1241*6b8177c5SMatthias Ringwald 
/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1255*6b8177c5SMatthias Ringwald 
1256*6b8177c5SMatthias Ringwald 
/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to uint32_t to match the register source operand. */
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1267*6b8177c5SMatthias Ringwald 
1268*6b8177c5SMatthias Ringwald 
/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to uint32_t to match the register source operand. */
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1279*6b8177c5SMatthias Ringwald 
1280*6b8177c5SMatthias Ringwald 
/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1291*6b8177c5SMatthias Ringwald 
1292*6b8177c5SMatthias Ringwald 
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
/* As with __LDREX/__STREX above, the builtin derives the access size from
   the pointer argument; the leading cast only sets the result type. */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
1351*6b8177c5SMatthias Ringwald 
1352*6b8177c5SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1353*6b8177c5SMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1354*6b8177c5SMatthias Ringwald 
1355*6b8177c5SMatthias Ringwald /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1356*6b8177c5SMatthias Ringwald 
1357*6b8177c5SMatthias Ringwald 
1358*6b8177c5SMatthias Ringwald /* ###################  Compiler specific Intrinsics  ########################### */
1359*6b8177c5SMatthias Ringwald /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1360*6b8177c5SMatthias Ringwald   Access to dedicated SIMD instructions
1361*6b8177c5SMatthias Ringwald   @{
1362*6b8177c5SMatthias Ringwald */
1363*6b8177c5SMatthias Ringwald 
1364*6b8177c5SMatthias Ringwald #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1365*6b8177c5SMatthias Ringwald 
/* Each CMSIS SIMD intrinsic maps 1:1 onto the armclang builtin of the same
   name; the builtins carry the correct prototypes, so no casts are needed. */
#define     __SADD8                 __builtin_arm_sadd8
#define     __QADD8                 __builtin_arm_qadd8
#define     __SHADD8                __builtin_arm_shadd8
#define     __UADD8                 __builtin_arm_uadd8
#define     __UQADD8                __builtin_arm_uqadd8
#define     __UHADD8                __builtin_arm_uhadd8
#define     __SSUB8                 __builtin_arm_ssub8
#define     __QSUB8                 __builtin_arm_qsub8
#define     __SHSUB8                __builtin_arm_shsub8
#define     __USUB8                 __builtin_arm_usub8
#define     __UQSUB8                __builtin_arm_uqsub8
#define     __UHSUB8                __builtin_arm_uhsub8
#define     __SADD16                __builtin_arm_sadd16
#define     __QADD16                __builtin_arm_qadd16
#define     __SHADD16               __builtin_arm_shadd16
#define     __UADD16                __builtin_arm_uadd16
#define     __UQADD16               __builtin_arm_uqadd16
#define     __UHADD16               __builtin_arm_uhadd16
#define     __SSUB16                __builtin_arm_ssub16
#define     __QSUB16                __builtin_arm_qsub16
#define     __SHSUB16               __builtin_arm_shsub16
#define     __USUB16                __builtin_arm_usub16
#define     __UQSUB16               __builtin_arm_uqsub16
#define     __UHSUB16               __builtin_arm_uhsub16
#define     __SASX                  __builtin_arm_sasx
#define     __QASX                  __builtin_arm_qasx
#define     __SHASX                 __builtin_arm_shasx
#define     __UASX                  __builtin_arm_uasx
#define     __UQASX                 __builtin_arm_uqasx
#define     __UHASX                 __builtin_arm_uhasx
#define     __SSAX                  __builtin_arm_ssax
#define     __QSAX                  __builtin_arm_qsax
#define     __SHSAX                 __builtin_arm_shsax
#define     __USAX                  __builtin_arm_usax
#define     __UQSAX                 __builtin_arm_uqsax
#define     __UHSAX                 __builtin_arm_uhsax
#define     __USAD8                 __builtin_arm_usad8
#define     __USADA8                __builtin_arm_usada8
#define     __SSAT16                __builtin_arm_ssat16
#define     __USAT16                __builtin_arm_usat16
#define     __UXTB16                __builtin_arm_uxtb16
#define     __UXTAB16               __builtin_arm_uxtab16
#define     __SXTB16                __builtin_arm_sxtb16
#define     __SXTAB16               __builtin_arm_sxtab16
#define     __SMUAD                 __builtin_arm_smuad
#define     __SMUADX                __builtin_arm_smuadx
#define     __SMLAD                 __builtin_arm_smlad
#define     __SMLADX                __builtin_arm_smladx
#define     __SMLALD                __builtin_arm_smlald
#define     __SMLALDX               __builtin_arm_smlaldx
#define     __SMUSD                 __builtin_arm_smusd
#define     __SMUSDX                __builtin_arm_smusdx
#define     __SMLSD                 __builtin_arm_smlsd
#define     __SMLSDX                __builtin_arm_smlsdx
#define     __SMLSLD                __builtin_arm_smlsld
#define     __SMLSLDX               __builtin_arm_smlsldx
#define     __SEL                   __builtin_arm_sel
#define     __QADD                  __builtin_arm_qadd
#define     __QSUB                  __builtin_arm_qsub
1425*6b8177c5SMatthias Ringwald 
/* Pack Halfword Bottom-Top: low halfword of ARG1 combined with ARG2 shifted
   left by ARG3 into the high halfword. Each macro argument is expanded once. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* Pack Halfword Top-Bottom: high halfword of ARG1 combined with ARG2 shifted
   right by ARG3 into the low halfword. */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
1431*6b8177c5SMatthias Ringwald 
/**
  \brief   Signed Most significant word Multiply Accumulate
  \details Executes a SMMLA instruction: multiplies op1 by op2, takes the most
           significant 32 bits of the 64-bit product and adds op3.
  \param [in]  op1  First multiplicand
  \param [in]  op2  Second multiplicand
  \param [in]  op3  Accumulate value
  \return           op3 + (top 32 bits of op1 * op2)
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}
1439*6b8177c5SMatthias Ringwald 
1440*6b8177c5SMatthias Ringwald #endif /* (__ARM_FEATURE_DSP == 1) */
1441*6b8177c5SMatthias Ringwald /*@} end of group CMSIS_SIMD_intrinsics */
1442*6b8177c5SMatthias Ringwald 
1443*6b8177c5SMatthias Ringwald 
1444*6b8177c5SMatthias Ringwald #endif /* __CMSIS_ARMCLANG_H */
1445