
/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler specific macros, functions, instructions
 * @version  V1.0.2
 * @date     09. April 2018
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE                          __attribute__((always_inline))
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
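
/*
  Usage sketch (illustrative only, not part of this header's API): the __UNALIGNED_*
  macros access a 16/32-bit quantity through a possibly unaligned pointer by going
  through a packed struct, so the compiler knows the access may be unaligned and
  generates code that is safe for it. 'buf' is a hypothetical byte buffer.

    uint8_t  buf[8];
    uint32_t word;
    __UNALIGNED_UINT32_WRITE(&buf[1], 0x12345678U);   // store at an odd address
    word = __UNALIGNED_UINT32_READ(&buf[1]);          // word == 0x12345678U
*/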
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif

/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
 */
#define __WFI()                             __ASM volatile ("wfi")

/**
  \brief   Wait For Event
 */
#define __WFE()                             __ASM volatile ("wfe")

/**
  \brief   Send Event
 */
#define __SEV()                             __ASM volatile ("sev")

/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
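
/*
  Usage sketch (illustrative only): a common pattern is to issue __DSB() after writing
  a memory-mapped control register so the write has completed before the processor
  continues (for example before entering sleep with __WFI()), and __ISB() after changes
  that affect instruction execution so that subsequent instructions are refetched.
  'DEVICE_CTRL' is a hypothetical peripheral register used purely for illustration.

    DEVICE_CTRL = 1U;   // hypothetical memory-mapped register write
    __DSB();            // ensure the write has completed
    __WFI();            // then wait for interrupt
*/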
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
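
/*
  Usage sketch (illustrative only): on a little-endian core, __REV converts between CPU
  byte order and big-endian (network) byte order. 'packet' is a hypothetical pointer to
  a received buffer, used purely for illustration.

    uint32_t be_word  = __UNALIGNED_UINT32_READ(packet);  // big-endian word from a buffer
    uint32_t host_val = __REV(be_word);                   // 0x12345678 <-> 0x78563412
*/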
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;
  __ASM volatile("rev16 %0, %1" : "=r" (result) : "r" (value));
  return result;
}
#endif

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}

/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
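
/*
  Worked example (illustrative only): rotating right moves the low bits into the high
  bits, e.g. __ROR(0x000000F0U, 4U) == 0x0000000FU and __ROR(0x00000001U, 1U) == 0x80000000U.
  A rotate count of 0 (or any multiple of 32) returns the value unchanged because of
  the 'op2 %= 32U' reduction above.
*/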
/**
  \brief   Breakpoint
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)

/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  int32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}
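
/*
  Worked example (illustrative only): __RBIT mirrors the 32 bits of its argument, so
  __RBIT(0x00000001U) == 0x80000000U and __RBIT(0xF0000000U) == 0x0000000FU. On cores
  without the RBIT instruction, the loop fallback above produces the same result.
*/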
/**
  \brief   Count leading zeros
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ             (uint8_t)__builtin_clz
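
/*
  Worked example (illustrative only): __CLZ(0x00000001U) == 31 and __CLZ(0x80000000U) == 0.
  Because this maps directly onto __builtin_clz, whose result is undefined for an
  argument of 0, callers should treat __CLZ(0) as undefined as well.
*/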
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern has to be used.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}

/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}
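
/*
  Usage sketch (illustrative only, not part of this header): the LDREX/STREX pair forms
  a retry loop for lock-free read-modify-write. STREX returns 0 only if the store
  succeeded while the exclusive monitor was still held, so the update is repeated until
  it takes effect. 'counter' and 'counter_increment' are hypothetical names.

    static volatile uint32_t counter;

    static inline void counter_increment(void)
    {
      uint32_t val;
      do
      {
        val = __LDREXW(&counter);                      // load and set exclusive monitor
      } while (__STREXW(val + 1U, &counter) != 0U);    // retry if another access intervened
    }
*/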
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
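
/*
  Worked example (illustrative only): __SSAT(val, 8) clamps a signed value into the
  8-bit range -128..127, so __SSAT(300, 8) yields 127 and __SSAT(-300, 8) yields -128.
  __USAT(val, 8) clamps into 0..255, so __USAT(-5, 8) yields 0 and __USAT(300, 8)
  yields 255. The saturation width must be a compile-time constant because it is
  encoded as an immediate in the SSAT/USAT instruction.
*/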
/* ###########################  Core Function Access  ########################### */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}

/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
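
/*
  Usage sketch (illustrative only): a simple critical section around data shared with
  an interrupt handler. Note that __enable_irq() unconditionally re-enables IRQs, so
  this pattern is only appropriate when interrupts are known to be enabled on entry;
  otherwise save and restore the CPSR instead. 'shared_value' is a hypothetical variable.

    __disable_irq();
    shared_value++;        // touch data also used by an IRQ handler
    __enable_irq();
*/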
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]  fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
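
/*
  Usage sketch (illustrative only): FPSCR is typically updated with a read-modify-write
  so unrelated status and control bits are preserved. 'FPSCR_FLAGS_MASK' is a hypothetical
  mask for whichever bits the caller wants to clear; it is not defined by this header.

    uint32_t fpscr = __get_FPSCR();
    fpscr &= ~FPSCR_FLAGS_MASK;    // clear selected bits, keep the rest
    __set_FPSCR(fpscr);
*/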
/** \brief  Get CPSR Register
    \return               CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return(result);
}

/** \brief  Set CPSR Register
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
}

/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;
  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     %0, sp   " : "=r"(result) : : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();
  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     sp, %0   " : : "r" (topOfProcStack) : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
}

/** \brief  Get FPEXC
    \return               Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) );
  return(result);
#else
  return(0);
#endif
}

/** \brief  Set FPEXC
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}

/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )
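
/*
  Usage sketch (illustrative only): the __get_CP/__set_CP macros wrap MRC/MCR accesses to
  coprocessor registers; the named variable is used directly as the destination or source
  operand. For example, reading the Main ID Register (CP15 c0, c0, 0) on a Cortex-A core
  could look like this ('midr' is a hypothetical local variable):

    uint32_t midr;
    __get_CP(15, 0, midr, 0, 0, 0);   // MRC p15, 0, midr, c0, c0, 0
*/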
#include "cmsis_cp15.h"

/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
    //Permit access to VFP/NEON registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    //Enable VFP/NEON
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    //Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    //Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"
#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    //Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif
    //Initialise FPSCR to a known state
    "        VMRS    R2,FPSCR          \n"
    "        LDR     R3,=0x00086060    \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R2,R2,R3          \n"
    "        VMSR    FPSCR,R2            "
  );
}
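
/*
  Usage sketch (illustrative only): __FPU_Enable() is normally called once during early
  startup, before any code that uses VFP/NEON registers (including compiler-generated
  floating point), since it grants CPACR access, sets FPEXC.EN and puts the register
  bank and FPSCR into a known state. 'startup_init' is a hypothetical early init hook.

    void startup_init(void)
    {
    #if (defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))
      __FPU_Enable();
    #endif
    }
*/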

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */