/**************************************************************************//**
 * @file     cmsis_iccarm.h
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 * @version  V5.0.6
 * @date     02. March 2018
 ******************************************************************************/
//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2018 IAR Systems
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------

#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#include <stdint.h>  /* uint16_t/uint32_t types used by the helpers below */

#define __IAR_FT _Pragma("inline=forced") __intrinsic
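
/* __IAR_FT marks the small helper functions in this header as forced-inline
   intrinsics, so they compile down to their instruction sequences with no
   call overhead. */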

#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif
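
/* __ICCARM_V8 is 1 on IAR compiler 8.x and later, which accepts GCC-style
   __attribute__ syntax; earlier versions fall back to IAR-specific pragmas
   and keywords in the definitions below. */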

#pragma language=extended

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif

/* Define compiler macros for CPU architecture, used in CMSIS 5. */
#if __ARM_ARCH_7A__
  /* Macro already defined */
#else
  #if defined(__ARM7A__)
    #define __ARM_ARCH_7A__ 1
  #endif
#endif

#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef __INLINE
  #define __INLINE inline
#endif

#ifndef __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef __PACKED
  /* Needs IAR language extensions */
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    #define __PACKED __packed
  #endif
#endif

#ifndef __PACKED_STRUCT
  /* Needs IAR language extensions */
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef __PACKED_UNION
  /* Needs IAR language extensions */
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    #define __PACKED_UNION __packed union
  #endif
#endif

#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif

#ifndef CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED __attribute__((deprecated))
#endif

#ifndef __UNALIGNED_UINT16_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint16_t __iar_uint16_read(void const *ptr)
  {
    return *(__packed uint16_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif

#ifndef __UNALIGNED_UINT16_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
  {
    *(__packed uint16_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint32_t __iar_uint32_read(void const *ptr)
  {
    return *(__packed uint32_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
  {
    *(__packed uint32_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif
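
/* Usage sketch (illustrative example, not part of this header): the macros
   above allow access at arbitrary byte offsets without alignment faults,
   e.g. when parsing a packed protocol buffer:

     uint8_t buf[8];                                  // hypothetical buffer
     __UNALIGNED_UINT32_WRITE(&buf[1], 0x12345678U);  // unaligned store
     uint32_t v = __UNALIGNED_UINT32_READ(&buf[1]);   // unaligned load
*/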

#if 0
#ifndef __UNALIGNED_UINT32 /* deprecated */
  #pragma language=save
  #pragma language=extended
  __packed struct __iar_u32 { uint32_t v; };
  #pragma language=restore
  #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif
#endif

#ifndef __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#ifndef __WEAK
  #if __ICCARM_V8
    #define __WEAK __attribute__((weak))
  #else
    #define __WEAK _Pragma("__weak")
  #endif
#endif

#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__ 0
#endif
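
/* When __ICCARM_INTRINSICS_VERSION__ == 2 (newer IAR compilers), the CMSIS
   names are mapped directly onto the compiler builtins from iccarm_builtin.h;
   otherwise the #else branch below wraps the legacy <intrinsics.h> API. */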

#if __ICCARM_INTRINSICS_VERSION__ == 2

  #if defined(__CLZ)
    #undef __CLZ
  #endif
  #if defined(__REVSH)
    #undef __REVSH
  #endif
  #if defined(__RBIT)
    #undef __RBIT
  #endif
  #if defined(__SSAT)
    #undef __SSAT
  #endif
  #if defined(__USAT)
    #undef __USAT
  #endif

  #include "iccarm_builtin.h"

  #define __enable_irq        __iar_builtin_enable_interrupt
  #define __disable_irq       __iar_builtin_disable_interrupt
  #define __enable_fault_irq  __iar_builtin_enable_fiq
  #define __disable_fault_irq __iar_builtin_disable_fiq

  #define __arm_rsr __iar_builtin_rsr
  #define __arm_wsr __iar_builtin_wsr

  #if __FPU_PRESENT
    #define __get_FPSCR() (__arm_rsr("FPSCR"))
  #else
    #define __get_FPSCR() ( 0 )
  #endif
  #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", VALUE))

  #define __get_CPSR()      (__arm_rsr("CPSR"))
  #define __get_mode()      (__get_CPSR() & 0x1FU)
  #define __set_CPSR(VALUE) (__arm_wsr("CPSR", (VALUE)))
  #define __set_mode(VALUE) (__arm_wsr("CPSR_c", (VALUE)))

  #define __get_FPEXC()      (__arm_rsr("FPEXC"))
  #define __set_FPEXC(VALUE) (__arm_wsr("FPEXC", VALUE))

  #define __get_CP(cp, op1, RT, CRn, CRm, op2) \
    ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2))
  #define __set_CP(cp, op1, RT, CRn, CRm, op2) \
    (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT)))
  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
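
  /* Usage sketch (illustrative): __get_CP/__set_CP build a coprocessor
     access from their operands. For example, reading SCTLR
     (CP15, opc1=0, CRn=c1, CRm=c0, opc2=0):

       uint32_t sctlr;
       __get_CP(15, 0, sctlr, 1, 0, 0);
  */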

  #include "cmsis_cp15.h"

  #define __NOP     __iar_builtin_no_operation
  #define __CLZ     __iar_builtin_CLZ
  #define __CLREX   __iar_builtin_CLREX
  #define __DMB     __iar_builtin_DMB
  #define __DSB     __iar_builtin_DSB
  #define __ISB     __iar_builtin_ISB
  #define __LDREXB  __iar_builtin_LDREXB
  #define __LDREXH  __iar_builtin_LDREXH
  #define __LDREXW  __iar_builtin_LDREX
  #define __RBIT    __iar_builtin_RBIT
  #define __REV     __iar_builtin_REV
  #define __REV16   __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }

  #define __ROR     __iar_builtin_ROR
  #define __RRX     __iar_builtin_RRX
  #define __SEV     __iar_builtin_SEV
  #define __SSAT    __iar_builtin_SSAT
  #define __STREXB  __iar_builtin_STREXB
  #define __STREXH  __iar_builtin_STREXH
  #define __STREXW  __iar_builtin_STREX
  #define __USAT    __iar_builtin_USAT
  #define __WFE     __iar_builtin_WFE
  #define __WFI     __iar_builtin_WFI

  #define __SADD8   __iar_builtin_SADD8
  #define __QADD8   __iar_builtin_QADD8
  #define __SHADD8  __iar_builtin_SHADD8
  #define __UADD8   __iar_builtin_UADD8
  #define __UQADD8  __iar_builtin_UQADD8
  #define __UHADD8  __iar_builtin_UHADD8
  #define __SSUB8   __iar_builtin_SSUB8
  #define __QSUB8   __iar_builtin_QSUB8
  #define __SHSUB8  __iar_builtin_SHSUB8
  #define __USUB8   __iar_builtin_USUB8
  #define __UQSUB8  __iar_builtin_UQSUB8
  #define __UHSUB8  __iar_builtin_UHSUB8
  #define __SADD16  __iar_builtin_SADD16
  #define __QADD16  __iar_builtin_QADD16
  #define __SHADD16 __iar_builtin_SHADD16
  #define __UADD16  __iar_builtin_UADD16
  #define __UQADD16 __iar_builtin_UQADD16
  #define __UHADD16 __iar_builtin_UHADD16
  #define __SSUB16  __iar_builtin_SSUB16
  #define __QSUB16  __iar_builtin_QSUB16
  #define __SHSUB16 __iar_builtin_SHSUB16
  #define __USUB16  __iar_builtin_USUB16
  #define __UQSUB16 __iar_builtin_UQSUB16
  #define __UHSUB16 __iar_builtin_UHSUB16
  #define __SASX    __iar_builtin_SASX
  #define __QASX    __iar_builtin_QASX
  #define __SHASX   __iar_builtin_SHASX
  #define __UASX    __iar_builtin_UASX
  #define __UQASX   __iar_builtin_UQASX
  #define __UHASX   __iar_builtin_UHASX
  #define __SSAX    __iar_builtin_SSAX
  #define __QSAX    __iar_builtin_QSAX
  #define __SHSAX   __iar_builtin_SHSAX
  #define __USAX    __iar_builtin_USAX
  #define __UQSAX   __iar_builtin_UQSAX
  #define __UHSAX   __iar_builtin_UHSAX
  #define __USAD8   __iar_builtin_USAD8
  #define __USADA8  __iar_builtin_USADA8
  #define __SSAT16  __iar_builtin_SSAT16
  #define __USAT16  __iar_builtin_USAT16
  #define __UXTB16  __iar_builtin_UXTB16
  #define __UXTAB16 __iar_builtin_UXTAB16
  #define __SXTB16  __iar_builtin_SXTB16
  #define __SXTAB16 __iar_builtin_SXTAB16
  #define __SMUAD   __iar_builtin_SMUAD
  #define __SMUADX  __iar_builtin_SMUADX
  #define __SMMLA   __iar_builtin_SMMLA
  #define __SMLAD   __iar_builtin_SMLAD
  #define __SMLADX  __iar_builtin_SMLADX
  #define __SMLALD  __iar_builtin_SMLALD
  #define __SMLALDX __iar_builtin_SMLALDX
  #define __SMUSD   __iar_builtin_SMUSD
  #define __SMUSDX  __iar_builtin_SMUSDX
  #define __SMLSD   __iar_builtin_SMLSD
  #define __SMLSDX  __iar_builtin_SMLSDX
  #define __SMLSLD  __iar_builtin_SMLSLD
  #define __SMLSLDX __iar_builtin_SMLSLDX
  #define __SEL     __iar_builtin_SEL
  #define __QADD    __iar_builtin_QADD
  #define __QSUB    __iar_builtin_QSUB
  #define __PKHBT   __iar_builtin_PKHBT
  #define __PKHTB   __iar_builtin_PKHTB

#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if !__FPU_PRESENT
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
    #error intrinsics.h has already been included!
  #endif
  #include <intrinsics.h>

  #if !__FPU_PRESENT
    #define __get_FPSCR() (0)
  #endif

  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177

  #define __enable_irq        __enable_interrupt
  #define __disable_irq       __disable_interrupt
  #define __enable_fault_irq  __enable_fiq
  #define __disable_fault_irq __disable_fiq
  #define __NOP               __no_operation

  #define __get_xPSR __get_PSR

  __IAR_FT void __set_mode(uint32_t mode)
  {
    __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
  }

  __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
  {
    return __LDREX((unsigned long *)ptr);
  }

  __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
  {
    return __STREX(value, (unsigned long *)ptr);
  }
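
  /* Usage sketch (illustrative): the exclusive-access pair is typically used
     in a retry loop to implement an atomic read-modify-write, e.g. with a
     hypothetical counter variable:

       uint32_t v;
       do {
         v = __LDREXW(&counter) + 1U;
       } while (__STREXW(v, &counter) != 0U);  // 0 means the store succeeded
  */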

  __IAR_FT uint32_t __RRX(uint32_t value)
  {
    uint32_t result;
    __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc");
    return (result);
  }

  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    op2 %= 32U;
    if (op2 == 0U) { return op1; }  /* avoid undefined shift by 32 */
    return (op1 >> op2) | (op1 << (32U - op2));
  }

  __IAR_FT uint32_t __get_FPEXC(void)
  {
  #if (__FPU_PRESENT == 1)
    uint32_t result;
    __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
    return (result);
  #else
    return (0);
  #endif
  }

  __IAR_FT void __set_FPEXC(uint32_t fpexc)
  {
  #if (__FPU_PRESENT == 1)
    __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
  #endif
  }

  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
  #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

  #include "cmsis_cp15.h"

#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))
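
/* __get_SP_usr/__set_SP_usr below briefly switch to System mode (CPS #0x1F),
   which banks the same SP as User mode, so the user stack pointer can be
   accessed from a privileged mode; when executed from User mode the mode
   change is simply ignored, as the inline comments note. */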
__IAR_FT uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS %0, cpsr   \n"
    "CPS #0x1F      \n" // no effect in USR mode
    "MOV %1, sp     \n"
    "MSR cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory"
    );
  return result;
}

__IAR_FT void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS %0, cpsr   \n"
    "CPS #0x1F      \n" // no effect in USR mode
    "MOV sp, %1     \n"
    "MSR cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory"
    );
}

#define __get_mode() (__get_CPSR() & 0x1FU)

__STATIC_INLINE
void __FPU_Enable(void)
{
  __ASM volatile(
    // Permit access to the VFP/NEON registers by modifying CPACR
    " MRC p15,0,R1,c1,c0,2  \n"
    " ORR R1,R1,#0x00F00000 \n"
    " MCR p15,0,R1,c1,c0,2  \n"
    // Ensure subsequent instructions execute with VFP/NEON access permitted
    " ISB                   \n"
    // Enable VFP/NEON
    " VMRS R1,FPEXC         \n"
    " ORR R1,R1,#0x40000000 \n"
    " VMSR FPEXC,R1         \n"
    // Initialise the VFP/NEON registers to 0
    " MOV R2,#0             \n"
    // Initialise the first sixteen D registers (D0-D15) to 0
    " VMOV D0, R2,R2        \n"
    " VMOV D1, R2,R2        \n"
    " VMOV D2, R2,R2        \n"
    " VMOV D3, R2,R2        \n"
    " VMOV D4, R2,R2        \n"
    " VMOV D5, R2,R2        \n"
    " VMOV D6, R2,R2        \n"
    " VMOV D7, R2,R2        \n"
    " VMOV D8, R2,R2        \n"
    " VMOV D9, R2,R2        \n"
    " VMOV D10,R2,R2        \n"
    " VMOV D11,R2,R2        \n"
    " VMOV D12,R2,R2        \n"
    " VMOV D13,R2,R2        \n"
    " VMOV D14,R2,R2        \n"
    " VMOV D15,R2,R2        \n"
#ifdef __ARM_ADVANCED_SIMD__
    // Initialise the upper sixteen D registers (D16-D31) to 0
    " VMOV D16,R2,R2        \n"
    " VMOV D17,R2,R2        \n"
    " VMOV D18,R2,R2        \n"
    " VMOV D19,R2,R2        \n"
    " VMOV D20,R2,R2        \n"
    " VMOV D21,R2,R2        \n"
    " VMOV D22,R2,R2        \n"
    " VMOV D23,R2,R2        \n"
    " VMOV D24,R2,R2        \n"
    " VMOV D25,R2,R2        \n"
    " VMOV D26,R2,R2        \n"
    " VMOV D27,R2,R2        \n"
    " VMOV D28,R2,R2        \n"
    " VMOV D29,R2,R2        \n"
    " VMOV D30,R2,R2        \n"
    " VMOV D31,R2,R2        \n"
#endif
    // Initialise FPSCR to a known state
    " VMRS R2,FPSCR         \n"
    " MOV32 R3,#0x00086060  \n" // Mask off all bits that do not have to be preserved; non-preserved bits can/should be zero
    " AND R2,R2,R3          \n"
    " VMSR FPSCR,R2         \n");
}
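
/* Usage note (illustrative): __FPU_Enable() is typically called once from
   startup code, before the first floating-point or NEON instruction
   executes, e.g. from SystemInit() or early in main(). */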

#undef __IAR_FT
#undef __ICCARM_V8

#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_H__ */