/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 * @version  V5.0.4
 * @date     09. April 2018
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
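
/* Usage sketch (illustrative, not part of the CMSIS API): the
   __UNALIGNED_*_READ/WRITE macros above access a halfword or word at an
   address that need not be naturally aligned, e.g. inside a packed
   protocol buffer. The buffer contents below are assumptions of this
   example.

     uint8_t frame[5] = {0x01U, 0x34U, 0x12U, 0x78U, 0x56U};
     uint16_t id = __UNALIGNED_UINT16_READ(&frame[1]);         // 0x1234 (little-endian)
     __UNALIGNED_UINT16_WRITE(&frame[1], (uint16_t)(id + 1U)); // store back in place
*/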


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure state.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}

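/* Usage sketch (illustrative): switching the current privileged thread
   from MSP to PSP. CONTROL.SPSEL is bit 1; the __ISB() afterwards makes
   sure that following instructions use the new stack configuration.
   topOfThreadStack is a hypothetical value, and __set_PSP()/__ISB() are
   defined later in this header.

     __set_PSP(topOfThreadStack);              // prepare the process stack first
     __set_CONTROL(__get_CONTROL() | 0x02U);   // CONTROL.SPSEL = 1: use PSP in Thread mode
     __ISB();                                  // synchronize the change
*/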

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}

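/* Usage sketch (illustrative): IPSR holds the active exception number,
   so a zero value indicates Thread mode. This is a common way to test
   whether code is currently running inside an exception handler.

     if (__get_IPSR() != 0U)
     {
       // called from an interrupt or exception handler
     }
*/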

/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}

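/* Usage sketch (illustrative): a nesting-safe critical section that
   saves and restores the previous PRIMASK state instead of blindly
   re-enabling interrupts on exit.

     uint32_t primask = __get_PRIMASK();   // remember current mask state
     __disable_irq();                      // enter critical section
     // ... non-interruptible work ...
     __set_PRIMASK(primask);               // restore previous state
*/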

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}

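/* Usage sketch (illustrative): BASEPRI_MAX only accepts the write if it
   raises the masking level, i.e. if the new value is non-zero and lower
   than the current BASEPRI (or BASEPRI is 0), so nested calls cannot
   accidentally unmask higher-priority interrupts. The value 0x40 assumes
   a device with 4 implemented priority bits (priority in the upper nibble).

     __set_BASEPRI_MAX(0x40U);   // mask interrupts of priority 0x40 and lower urgency
     // ... protected work ...
     __set_BASEPRI(0U);          // remove BASEPRI masking again
*/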

/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/**
  \brief   Get Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence zero is always returned in non-secure
  state.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence the write is silently ignored in non-secure
  state.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}

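/* Usage sketch (illustrative): an RTOS can program the limit before
   switching to a thread stack, so that an overflow raises a stack-overflow
   fault (UsageFault/HardFault) instead of silently corrupting adjacent
   memory. stackBase and stackSize are hypothetical values owned by the
   scheduler.

     __set_PSPLIM((uint32_t)stackBase);            // lowest address PSP may reach
     __set_PSP((uint32_t)stackBase + stackSize);   // initial top of the stack
*/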

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence the write is silently ignored.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence zero is always returned in non-secure
  state.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence the write is silently ignored in non-secure
  state.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}

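/* Usage sketch (illustrative): testing and clearing the cumulative
   floating-point exception flags in FPSCR[7:0] (here DZC, bit 1, for
   divide-by-zero). On builds without an FPU, per the fallbacks above,
   the read returns 0 and the write is ignored.

     if ((__get_FPSCR() & (1U << 1)) != 0U)
     {
       // a divide-by-zero occurred since the flags were last cleared
       __set_FPSCR(__get_FPSCR() & ~0x9FU);   // clear IOC/DZC/OFC/UFC/IXC/IDC
     }
*/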

/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()                             __ASM volatile ("wfi")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory
           after the ISB instruction has completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}

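/* Usage sketch (illustrative): a typical DSB/ISB pairing after changing
   system configuration, here relocating the vector table. SCB and the
   new base address are assumptions of this example; SCB comes from the
   device's core peripheral header, not from this file.

     SCB->VTOR = (uint32_t)&vector_table;   // hypothetical new vector table base
     __DSB();                               // make sure the write has completed
     __ISB();                               // refetch with the new configuration
*/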
893*2fd737d3SMatthias Ringwald
894*2fd737d3SMatthias Ringwald /**
895*2fd737d3SMatthias Ringwald \brief Reverse byte order (32 bit)
896*2fd737d3SMatthias Ringwald \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
897*2fd737d3SMatthias Ringwald \param [in] value Value to reverse
898*2fd737d3SMatthias Ringwald \return Reversed value
899*2fd737d3SMatthias Ringwald */
__REV(uint32_t value)900*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
901*2fd737d3SMatthias Ringwald {
902*2fd737d3SMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
903*2fd737d3SMatthias Ringwald return __builtin_bswap32(value);
904*2fd737d3SMatthias Ringwald #else
905*2fd737d3SMatthias Ringwald uint32_t result;
906*2fd737d3SMatthias Ringwald
907*2fd737d3SMatthias Ringwald __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
908*2fd737d3SMatthias Ringwald return result;
909*2fd737d3SMatthias Ringwald #endif
910*2fd737d3SMatthias Ringwald }


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of bits to rotate (taken modulo 32)
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
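
/* Usage sketch (illustrative, not part of CMSIS): a rotate-left can be built
   from __ROR, since rotating right by (32 - n) rotates left by n.

     uint32_t rol8 = __ROR(0x12345678U, 32U - 8U);   // yields 0x34567812
 */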


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)   __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                  /* shift when v's highest bits are zero */
#endif
  return result;
}


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
  \note    The result for an input of 0 is undefined: __builtin_clz(0) is
           undefined behaviour in GCC, even though the CLZ instruction itself returns 32.
 */
#define __CLZ             (uint8_t)__builtin_clz
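
/* Usage sketch (illustrative, not part of CMSIS): combining __RBIT and __CLZ
   yields a count-trailing-zeros, e.g. to find the lowest pending bit in a
   status mask. Only valid for a non-zero argument, because __CLZ above is
   undefined for 0.

     static inline uint8_t ctz32(uint32_t mask)   // mask != 0
     {
       return __CLZ(__RBIT(mask));
     }
 */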


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for an 8-bit value.
  \param [in]    addr  Pointer to data
  \return              value of type uint8_t at (*addr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16-bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint16_t at (*addr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32-bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint32_t at (*addr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
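
/* Usage sketch (illustrative, not part of CMSIS): a lock-free increment built
   from the exclusive-access pair above. __STREXW returns 0 only if the
   reservation taken by __LDREXW was still intact, so the loop retries on
   contention.

     static inline uint32_t atomic_inc(volatile uint32_t *cnt)
     {
       uint32_t val;
       do {
         val = __LDREXW(cnt) + 1U;           // load-exclusive, then bump
       } while (__STREXW(val, cnt) != 0U);   // store failed -> retry
       return val;
     }
 */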

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return            Saturated value
 */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return            Saturated value
 */
#define __USAT(ARG1,ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
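
/* Usage sketch (illustrative, not part of CMSIS): clamping an intermediate
   32-bit result to a signed 16-bit range, as is common in audio paths. Note
   the saturation width must be a compile-time constant ("I" constraint).

     int32_t acc = 40000;                          // exceeds int16_t range
     int16_t sample = (int16_t)__SSAT(acc, 16);    // saturates to 32767
 */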


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for an 8-bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead.
  */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead.
  */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

/**
  \brief   Signed Saturate
  \details Saturates a signed value (software fallback for cores without the SSAT instruction).
  \param [in]  val  Value to be saturated
  \param [in]  sat  Bit position to saturate to (1..32)
  \return           Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value (software fallback for cores without the USAT instruction).
  \param [in]  val  Value to be saturated
  \param [in]  sat  Bit position to saturate to (0..31)
  \return           Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
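
/* Worked examples for the software fallbacks above (illustrative):
     __SSAT( 1000, 8) ->  127   (max for 8-bit signed is 2^7 - 1)
     __SSAT(-1000, 8) -> -128   (min is -2^7)
     __USAT(  300, 8) ->  255   (max for 8-bit unsigned is 2^8 - 1)
     __USAT(   -5, 8) ->    0   (negative input clamps to zero)
 */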

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes an LDAB instruction for an 8-bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes an LDAH instruction for 16-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes an LDA instruction for 32-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes an STLB instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes an STLH instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes an STL instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
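
/* Usage sketch (illustrative, not part of CMSIS): the load-acquire and
   store-release accesses above implement one-way message passing without an
   explicit barrier; msg, flag and use() are hypothetical.

     producer:
       msg = payload;                    // plain store of the data
       __STL(1U, &flag);                 // release: data visible before flag

     consumer:
       while (__LDA(&flag) == 0U) { }    // acquire: flag read before data
       use(msg);
 */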


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes an LDAB exclusive instruction for an 8-bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes an LDAH exclusive instruction for 16-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes an LDA exclusive instruction for 32-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes an STLB exclusive instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes an STLH exclusive instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes an STL exclusive instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
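
/* Usage sketch (illustrative, not part of CMSIS): a simple spinlock using the
   exclusive load-acquire / store-release pair above.

     static inline void lock(volatile uint32_t *l)
     {
       do {
         while (__LDAEX(l) != 0U) { }      // wait until the lock reads free
       } while (__STLEX(1U, l) != 0U);     // claim it; retry if we raced
     }

     static inline void unlock(volatile uint32_t *l)
     {
       __STL(0U, l);                       // release the lock
     }
 */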

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
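
/* Usage sketch (illustrative, not part of CMSIS): the byte-wise SIMD forms
   operate on four independent 8-bit lanes packed into one word. __UHADD8
   halves each lane sum, which averages four pixel pairs at once.

     uint32_t a   = 0x10203040U;
     uint32_t b   = 0x30201000U;
     uint32_t avg = __UHADD8(a, b);   // yields 0x20202020
 */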


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
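
/* Usage sketch (illustrative, not part of CMSIS): __USADA8 accumulates the
   sum of absolute byte differences, the inner kernel of motion-estimation
   style comparisons; cur and ref are hypothetical packed pixel rows of n
   words each.

     uint32_t sad = 0U;
     for (uint32_t i = 0U; i < n; i++) {
       sad = __USADA8(cur[i], ref[i], sad);   // sad += sum of |byte diffs|
     }
 */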
1843*2fd737d3SMatthias Ringwald
1844*2fd737d3SMatthias Ringwald #define __SSAT16(ARG1,ARG2) \
1845*2fd737d3SMatthias Ringwald ({ \
1846*2fd737d3SMatthias Ringwald int32_t __RES, __ARG1 = (ARG1); \
1847*2fd737d3SMatthias Ringwald __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1848*2fd737d3SMatthias Ringwald __RES; \
1849*2fd737d3SMatthias Ringwald })
1850*2fd737d3SMatthias Ringwald
1851*2fd737d3SMatthias Ringwald #define __USAT16(ARG1,ARG2) \
1852*2fd737d3SMatthias Ringwald ({ \
1853*2fd737d3SMatthias Ringwald uint32_t __RES, __ARG1 = (ARG1); \
1854*2fd737d3SMatthias Ringwald __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1855*2fd737d3SMatthias Ringwald __RES; \
1856*2fd737d3SMatthias Ringwald })
1857*2fd737d3SMatthias Ringwald
__UXTB16(uint32_t op1)1858*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1859*2fd737d3SMatthias Ringwald {
1860*2fd737d3SMatthias Ringwald uint32_t result;
1861*2fd737d3SMatthias Ringwald
1862*2fd737d3SMatthias Ringwald __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
1863*2fd737d3SMatthias Ringwald return(result);
1864*2fd737d3SMatthias Ringwald }
1865*2fd737d3SMatthias Ringwald
__UXTAB16(uint32_t op1,uint32_t op2)1866*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1867*2fd737d3SMatthias Ringwald {
1868*2fd737d3SMatthias Ringwald uint32_t result;
1869*2fd737d3SMatthias Ringwald
1870*2fd737d3SMatthias Ringwald __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1871*2fd737d3SMatthias Ringwald return(result);
1872*2fd737d3SMatthias Ringwald }
1873*2fd737d3SMatthias Ringwald
__SXTB16(uint32_t op1)1874*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1875*2fd737d3SMatthias Ringwald {
1876*2fd737d3SMatthias Ringwald uint32_t result;
1877*2fd737d3SMatthias Ringwald
1878*2fd737d3SMatthias Ringwald __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
1879*2fd737d3SMatthias Ringwald return(result);
1880*2fd737d3SMatthias Ringwald }
1881*2fd737d3SMatthias Ringwald
__SXTAB16(uint32_t op1,uint32_t op2)1882*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1883*2fd737d3SMatthias Ringwald {
1884*2fd737d3SMatthias Ringwald uint32_t result;
1885*2fd737d3SMatthias Ringwald
1886*2fd737d3SMatthias Ringwald __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1887*2fd737d3SMatthias Ringwald return(result);
1888*2fd737d3SMatthias Ringwald }
1889*2fd737d3SMatthias Ringwald
__SMUAD(uint32_t op1,uint32_t op2)1890*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1891*2fd737d3SMatthias Ringwald {
1892*2fd737d3SMatthias Ringwald uint32_t result;
1893*2fd737d3SMatthias Ringwald
1894*2fd737d3SMatthias Ringwald __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1895*2fd737d3SMatthias Ringwald return(result);
1896*2fd737d3SMatthias Ringwald }
1897*2fd737d3SMatthias Ringwald
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

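/* The inline-assembly PKHBT/PKHTB variants below are disabled; the portable
   C macro forms that follow the #if 0 block are used instead. */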
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#endif

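/**
  \brief   Pack two halfwords into one word (portable C forms).
  \details __PKHBT keeps the bottom halfword of ARG1 and takes the top halfword
           from ARG2 shifted left by ARG3; __PKHTB keeps the top halfword of
           ARG1 and takes the bottom halfword from ARG2 shifted right by ARG3.
           Illustrative use with hypothetical values:
  \code
    uint32_t packed = __PKHBT(0x1234U, 0x5678U, 16);  // yields 0x56781234
  \endcode
 */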
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

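/**
  \brief   32-bit signed multiply returning the most significant word, with accumulate.
  \details result = op3 + (((int64_t)op1 * op2) >> 32), i.e. op3 plus the upper
           32 bits of the 64-bit product (truncated, not rounded).
 */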
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */