/**************************************************************************//**
 * @file     cmsis_armcc.h
 * @brief    CMSIS compiler ARMCC (ARM compiler V5) header file
 * @version  V5.0.1
 * @date     03. February 2017
 ******************************************************************************/
/*
 * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H


#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* CMSIS compiler control architecture macros */
#if ((defined (__TARGET_ARCH_6_M ) && (__TARGET_ARCH_6_M  == 1)) || \
     (defined (__TARGET_ARCH_6S_M) && (__TARGET_ARCH_6S_M == 1))    )
  #define __ARM_ARCH_6M__           1
#endif

#if (defined (__TARGET_ARCH_7_M ) && (__TARGET_ARCH_7_M  == 1))
  #define __ARM_ARCH_7M__           1
#endif

#if (defined (__TARGET_ARCH_7E_M) && (__TARGET_ARCH_7E_M == 1))
  #define __ARM_ARCH_7EM__          1
#endif

  /* __ARM_ARCH_8M_BASE__  not applicable */
  /* __ARM_ARCH_8M_MAIN__  not applicable */


/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                     __asm
#endif
#ifndef   __INLINE
  #define __INLINE                  __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE           static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN               __declspec(noreturn)
#endif
#ifndef   __USED
  #define __USED                    __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                    __attribute__((weak))
#endif
#ifndef   __UNALIGNED_UINT32
  #define __UNALIGNED_UINT32(x)     (*((__packed uint32_t *)(x)))
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)              __attribute__((aligned(x)))
#endif
#ifndef   __PACKED
  #define __PACKED                  __attribute__((packed))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT           __packed struct
#endif

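/* Usage sketch (illustrative only, not part of the original CMSIS header):
   how the portability macros above are typically applied. The structure and
   buffer names below are hypothetical application code.

     __PACKED_STRUCT frame_t {                    // byte-packed layout, no padding
       uint8_t  type;
       uint16_t length;
       uint32_t payload;
     };

     __ALIGNED(8) static uint8_t rx_buffer[64];         // force 8-byte alignment

     uint32_t word = __UNALIGNED_UINT32(&rx_buffer[1]); // unaligned 32-bit read
 */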

/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __enable_irq();  */


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __disable_irq();  */

/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_INLINE uint32_t __get_CONTROL(void)
{
  register uint32_t __regControl         __ASM("control");
  return(__regControl);
}


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  register uint32_t __regControl         __ASM("control");
  __regControl = control;
}


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_INLINE uint32_t __get_IPSR(void)
{
  register uint32_t __regIPSR          __ASM("ipsr");
  return(__regIPSR);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
  register uint32_t __regAPSR          __ASM("apsr");
  return(__regAPSR);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_INLINE uint32_t __get_xPSR(void)
{
  register uint32_t __regXPSR          __ASM("xpsr");
  return(__regXPSR);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  return(__regProcessStackPointer);
}


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  __regProcessStackPointer = topOfProcStack;
}


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  return(__regMainStackPointer);
}


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  __regMainStackPointer = topOfMainStack;
}


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  register uint32_t __regPriMask         __ASM("primask");
  return(__regPriMask);
}


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  register uint32_t __regPriMask         __ASM("primask");
  __regPriMask = (priMask);
}

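/* Usage sketch (illustrative only, not part of the original CMSIS header):
   a PRIMASK-based critical section built on the accessors above. The helper
   names are hypothetical application code.

     static uint32_t critical_enter(void)
     {
       uint32_t primask = __get_PRIMASK();   // remember the current mask state
       __disable_irq();                      // set PRIMASK, blocking IRQs
       return primask;
     }

     static void critical_exit(uint32_t primask)
     {
       __set_PRIMASK(primask);               // restore the previous mask state
     }
 */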

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))    )

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  register uint32_t __regBasePri         __ASM("basepri");
  return(__regBasePri);
}


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI(uint32_t basePri)
{
  register uint32_t __regBasePri         __ASM("basepri");
  __regBasePri = (basePri & 0xFFU);
}


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  register uint32_t __regBasePriMax      __ASM("basepri_max");
  __regBasePriMax = (basePri & 0xFFU);
}

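/* Usage sketch (illustrative only, not part of the original CMSIS header):
   masking all interrupts at or below a given priority while higher-priority
   interrupts stay enabled. The mask value 0x40 assumes a device with four
   implemented priority bits and is purely an example.

     uint32_t old_basepri = __get_BASEPRI();
     __set_BASEPRI_MAX(0x40U);        // raise the mask only if it gets stricter
     // ... short protected region ...
     __set_BASEPRI(old_basepri);      // restore the previous mask
 */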

/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  return(__regFaultMask);
}


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  __regFaultMask = (faultMask & (uint32_t)1U);
}

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))    ) */


#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}

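/* Usage sketch (illustrative only, not part of the original CMSIS header):
   checking and clearing the cumulative floating-point exception flags through
   the accessors above. The bit positions follow the Arm FPSCR layout
   (IOC = bit 0, DZC = bit 1, OFC = bit 2, UFC = bit 3, IXC = bit 4, IDC = bit 7).

     uint32_t fpscr = __get_FPSCR();
     if ((fpscr & 0x1U) != 0U)              // invalid-operation flag set?
     {
       __set_FPSCR(fpscr & ~0x9FU);         // clear all cumulative flags
     }
 */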
#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */



/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI                             __wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)

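/* Usage sketch (illustrative only, not part of the original CMSIS header):
   a common barrier sequence around entering sleep. The peripheral pointer is
   hypothetical; the point is that the write is guaranteed to have completed
   before the WFI hint is executed.

     uart->CTRL = 0U;       // hypothetical memory-mapped write
     __DSB();               // wait until the write has completed
     __WFI();               // then suspend execution until an interrupt
 */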
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an integer value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in two unsigned short values.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif


/**
  \brief   Reverse byte order in signed short value
  \details Reverses the byte order in a signed short value with sign extension to integer.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif

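/* Usage sketch (illustrative only, not part of the original CMSIS header):
   byte-order conversion with the intrinsics above, e.g. between the core's
   little-endian view and big-endian (network) order.

     uint32_t net32  = __REV(0x12345678U);        // 0x78563412
     uint32_t pair16 = __REV16(0x1122AABBU);      // bytes swapped within each
                                                  // halfword: 0x2211BBAA
     int32_t  s16    = __REVSH((int32_t)0x0080);  // 0xFFFF8000, sign-extended
 */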

/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
#define __ROR                             __ror


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))    )
  #define __RBIT                          __rbit
#else
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;
  int32_t s = (4 /*sizeof(v)*/ * 8) - 1; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
  return(result);
}
#endif


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                             __clz

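/* Usage sketch (illustrative only, not part of the original CMSIS header):
   two common bit helpers built from the intrinsics above. The helper names are
   hypothetical; note that __CLZ(0) returns 32, so callers must treat an input
   of 0 as a special case for highest_set_bit().

     static __INLINE uint32_t highest_set_bit(uint32_t x)     // index 0..31
     {
       return 31U - __CLZ(x);
     }

     static __INLINE uint32_t count_trailing_zeros(uint32_t x)
     {
       return __CLZ(__RBIT(x));       // reverse the bits, then count from the top
     }
 */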

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))    )

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex

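/* Usage sketch (illustrative only, not part of the original CMSIS header):
   a lock-free increment built from the exclusive-access macros above. The
   helper name is hypothetical. The loop retries whenever another agent breaks
   the exclusive monitor between the load and the store.

     static __INLINE uint32_t atomic_increment(volatile uint32_t *addr)
     {
       uint32_t value;
       do
       {
         value = __LDREXW(addr) + 1U;           // exclusive load, then modify
       } while (__STREXW(value, addr) != 0U);   // retry until the store succeeds
       return value;
     }
 */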

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif

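/* Usage sketch (illustrative only, not part of the original CMSIS header):
   clamping a wide intermediate result into the signed 16-bit range before it
   is stored to a Q15 sample buffer. The variable names are hypothetical.

     int32_t acc    = (gain * input) >> 3;        // wide intermediate result
     int16_t sample = (int16_t)__SSAT(acc, 16);   // clamp to -32768..32767
 */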

/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRT instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))    ) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )

#define __SADD8                           __sadd8
#define __QADD8                           __qadd8
#define __SHADD8                          __shadd8
#define __UADD8                           __uadd8
#define __UQADD8                          __uqadd8
#define __UHADD8                          __uhadd8
#define __SSUB8                           __ssub8
#define __QSUB8                           __qsub8
#define __SHSUB8                          __shsub8
#define __USUB8                           __usub8
#define __UQSUB8                          __uqsub8
#define __UHSUB8                          __uhsub8
#define __SADD16                          __sadd16
#define __QADD16                          __qadd16
#define __SHADD16                         __shadd16
#define __UADD16                          __uadd16
#define __UQADD16                         __uqadd16
#define __UHADD16                         __uhadd16
#define __SSUB16                          __ssub16
#define __QSUB16                          __qsub16
#define __SHSUB16                         __shsub16
#define __USUB16                          __usub16
#define __UQSUB16                         __uqsub16
#define __UHSUB16                         __uhsub16
#define __SASX                            __sasx
#define __QASX                            __qasx
#define __SHASX                           __shasx
#define __UASX                            __uasx
#define __UQASX                           __uqasx
#define __UHASX                           __uhasx
#define __SSAX                            __ssax
#define __QSAX                            __qsax
#define __SHSAX                           __shsax
#define __USAX                            __usax
#define __UQSAX                           __uqsax
#define __UHSAX                           __uhsax
#define __USAD8                           __usad8
#define __USADA8                          __usada8
#define __SSAT16                          __ssat16
#define __USAT16                          __usat16
#define __UXTB16                          __uxtb16
#define __UXTAB16                         __uxtab16
#define __SXTB16                          __sxtb16
#define __SXTAB16                         __sxtab16
#define __SMUAD                           __smuad
#define __SMUADX                          __smuadx
#define __SMLAD                           __smlad
#define __SMLADX                          __smladx
#define __SMLALD                          __smlald
#define __SMLALDX                         __smlaldx
#define __SMUSD                           __smusd
#define __SMUSDX                          __smusdx
#define __SMLSD                           __smlsd
#define __SMLSDX                          __smlsdx
#define __SMLSLD                          __smlsld
#define __SMLSLDX                         __smlsldx
#define __SEL                             __sel
#define __QADD                            __qadd
#define __QSUB                            __qsub

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                      ((int64_t)(ARG3) << 32U)     ) >> 32U))

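/* Usage sketch (illustrative only, not part of the original CMSIS header):
   a packed Q15 dot product using the SIMD aliases above. Each uint32_t holds
   two 16-bit samples; the function and parameter names are hypothetical.

     static __INLINE int32_t dot_product_q15(const uint32_t *a,
                                             const uint32_t *b,
                                             uint32_t pairs)
     {
       uint32_t sum = 0U;
       while (pairs-- != 0U)
       {
         sum = __SMLAD(*a++, *b++, sum);   // sum += a.lo*b.lo + a.hi*b.hi
       }
       return (int32_t)sum;
     }
 */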
#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCC_H */