11/**************************************************************************/ /**
22 * @file cmsis_gcc.h
33 * @brief CMSIS compiler specific macros, functions, instructions
4- * @version  V1.2.0
5- * @date     17. May 2019
4+ * @version  V1.3.0
5+ * @date     17. December 2019
66 ******************************************************************************/
77/*
88 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
@@ -119,6 +119,15 @@ __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
119119}
120120
121121
122+ __STATIC_FORCEINLINE uint32_t __QSUB8 (uint32_t op1 , uint32_t op2 )
123+ {
124+ uint32_t result ;
125+
126+ __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
127+ return (result );
128+ }
129+
130+
122131__STATIC_FORCEINLINE uint32_t __QADD16 (uint32_t op1 , uint32_t op2 )
123132{
124133 uint32_t result ;
@@ -127,6 +136,14 @@ __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
127136 return (result );
128137}
129138
139+ __STATIC_FORCEINLINE uint32_t __QADD8 (uint32_t op1 , uint32_t op2 )
140+ {
141+ uint32_t result ;
142+
143+ __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
144+ return (result );
145+ }
146+
130147__STATIC_FORCEINLINE int32_t __QADD ( int32_t op1 , int32_t op2 )
131148{
132149 int32_t result ;
@@ -135,6 +152,22 @@ __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
135152 return (result );
136153}
137154
155+ __STATIC_FORCEINLINE uint32_t __QSAX (uint32_t op1 , uint32_t op2 )
156+ {
157+ uint32_t result ;
158+
159+ __ASM ("qsax %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
160+ return (result );
161+ }
162+
163+ __STATIC_FORCEINLINE uint32_t __SHSAX (uint32_t op1 , uint32_t op2 )
164+ {
165+ uint32_t result ;
166+
167+ __ASM ("shsax %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
168+ return (result );
169+ }
170+
138171__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1 , uint32_t op2 , uint64_t acc )
139172{
140173 union llreg_u {
@@ -160,6 +193,15 @@ __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
160193 return (result );
161194}
162195
196+ __STATIC_FORCEINLINE uint32_t __SXTB16 (uint32_t op1 )
197+ {
198+ uint32_t result ;
199+
200+ __ASM ("sxtb16 %0, %1" : "=r" (result ) : "r" (op1 ));
201+ return (result );
202+ }
203+
204+
163205__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1 , uint32_t op2 )
164206{
165207 uint32_t result ;
@@ -168,9 +210,14 @@ __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
168210 return (result );
169211}
170212
213+
214+
/**
  \brief   Pack Halfword Bottom-Top: low 16 bits of ARG1 combined with
           (ARG2 << ARG3) in the high 16 bits
  \param [in] ARG1  provides bits [15:0] of the result
  \param [in] ARG2  shifted left by ARG3; provides bits [31:16] of the result
  \param [in] ARG3  left-shift amount applied to ARG2
  \return           packed 32-bit value
 */
#define __PKHBT(ARG1, ARG2, ARG3) \
  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)   )
173217
/**
  \brief   Pack Halfword Top-Bottom: high 16 bits of ARG1 combined with
           (ARG2 >> ARG3) in the low 16 bits
  \param [in] ARG1  provides bits [31:16] of the result
  \param [in] ARG2  shifted right by ARG3; provides bits [15:0] of the result
  \param [in] ARG3  right-shift amount applied to ARG2
  \return           packed 32-bit value
 */
#define __PKHTB(ARG1, ARG2, ARG3) \
  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)   )
220+
174221__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1 , uint32_t op2 , uint32_t op3 )
175222{
176223 uint32_t result ;
@@ -220,7 +267,61 @@ __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
220267 return (result );
221268}
222269
270+ __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1 , uint32_t op2 )
271+ {
272+ uint32_t result ;
273+
274+ __ASM volatile ("smusd %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
275+ return (result );
276+ }
277+
278+ __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1 , uint32_t op2 )
279+ {
280+ uint32_t result ;
281+
282+ __ASM volatile ("smusdx %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
283+ return (result );
284+ }
285+
286+ __STATIC_FORCEINLINE uint32_t __QASX (uint32_t op1 , uint32_t op2 )
287+ {
288+ uint32_t result ;
289+
290+ __ASM ("qasx %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
291+ return (result );
292+ }
293+
294+ __STATIC_FORCEINLINE uint32_t __SHADD16 (uint32_t op1 , uint32_t op2 )
295+ {
296+ uint32_t result ;
297+
298+ __ASM ("shadd16 %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
299+ return (result );
300+ }
301+
302+ __STATIC_FORCEINLINE uint32_t __SHSUB16 (uint32_t op1 , uint32_t op2 )
303+ {
304+ uint32_t result ;
223305
306+ __ASM ("shsub16 %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
307+ return (result );
308+ }
309+
310+ __STATIC_FORCEINLINE uint32_t __SHASX (uint32_t op1 , uint32_t op2 )
311+ {
312+ uint32_t result ;
313+
314+ __ASM ("shasx %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
315+ return (result );
316+ }
317+
318+ __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1 , uint32_t op2 , uint32_t op3 )
319+ {
320+ uint32_t result ;
321+
322+ __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result ) : "r" (op1 ), "r" (op2 ), "r" (op3 ) );
323+ return (result );
324+ }
224325
225326
226327/* ########################## Core Instruction Access ######################### */
@@ -232,12 +333,12 @@ __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
/**
  \brief   Wait For Interrupt
 */
/* "memory" clobber keeps memory accesses ordered around the sleep point. */
#define __WFI()    __ASM volatile ("wfi":::"memory")
236337
/**
  \brief   Wait For Event
 */
/* "memory" clobber keeps memory accesses ordered around the sleep point. */
#define __WFE()    __ASM volatile ("wfe":::"memory")
241342
242343/**
243344 \brief Send Event
@@ -289,7 +390,7 @@ __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
289390#else
290391 uint32_t result ;
291392
292- __ASM volatile ("rev %0, %1" : "=r" (result ) : "r" (value ) );
393+ __ASM ("rev %0, %1" : "=r" (result ) : "r" (value ) );
293394 return result ;
294395#endif
295396}
@@ -300,14 +401,12 @@ __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
300401 \param [in] value Value to reverse
301402 \return Reversed value
302403 */
303- #ifndef __NO_EMBEDDED_ASM
304- __attribute__((section (".rev16_text" ))) __STATIC_INLINE uint32_t __REV16 (uint32_t value )
404+ __STATIC_FORCEINLINE uint32_t __REV16 (uint32_t value )
305405{
306406 uint32_t result ;
307- __ASM volatile ("rev16 %0, %1" : "=r" (result ) : "r" (value ));
407+ __ASM ("rev16 %0, %1" : "=r" (result ) : "r" (value ));
308408 return result ;
309409}
310- #endif
311410
312411/**
313412 \brief Reverse byte order (16 bit)
@@ -322,7 +421,7 @@ __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
322421#else
323422 int16_t result ;
324423
325- __ASM volatile ("revsh %0, %1" : "=r" (result ) : "r" (value ) );
424+ __ASM ("revsh %0, %1" : "=r" (result ) : "r" (value ) );
326425 return result ;
327426#endif
328427}
@@ -364,7 +463,7 @@ __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
364463#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1 )) || \
365464 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1 )) || \
366465 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1 )) )
367- __ASM volatile ("rbit %0, %1" : "=r" (result ) : "r" (value ) );
466+ __ASM ("rbit %0, %1" : "=r" (result ) : "r" (value ) );
368467#else
369468 int32_t s = (4U /*sizeof(v)*/ * 8U ) - 1U ; /* extra shift needed at end */
370469
@@ -529,11 +628,11 @@ __STATIC_FORCEINLINE void __CLREX(void)
529628 \param [in] sat Bit position to saturate to (1..32)
530629 \return Saturated value
531630 */
#define __SSAT(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __res, __in = (ARG1); \
  /* NOTE(review): "cc" clobber reflects that SSAT updates the APSR (Q flag). \
     ARG2 must be a compile-time constant ("I" constraint). */ \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__res) : "I" (ARG2), "r" (__in) : "cc" ); \
  __res; \
 })
539638
@@ -545,11 +644,11 @@ __extension__ \
545644 \param [in] sat Bit position to saturate to (0..31)
546645 \return Saturated value
547646 */
#define __USAT(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __res, __in = (ARG1); \
  /* NOTE(review): "cc" clobber reflects that USAT updates the APSR (Q flag). \
     ARG2 must be a compile-time constant ("I" constraint). */ \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__res) : "I" (ARG2), "r" (__in) : "cc" ); \
  __res; \
 })
555654
@@ -637,7 +736,7 @@ __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
637736 */
638737__STATIC_FORCEINLINE void __set_CPSR (uint32_t cpsr )
639738{
640- __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr ) : "memory" );
739+ __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr ) : "cc" , " memory" );
641740}
642741
643742/** \brief Get Mode
0 commit comments