  1. /******************************************************************************
  2. * @file cachel1_armv7.h
  3. * @brief CMSIS Level 1 Cache API for Armv7-M and later
  4. * @version V1.0.3
  5. * @date 17. March 2023
  6. ******************************************************************************/
  7. /*
  8. * Copyright (c) 2020-2021 Arm Limited. All rights reserved.
  9. *
  10. * SPDX-License-Identifier: Apache-2.0
  11. *
  12. * Licensed under the Apache License, Version 2.0 (the License); you may
  13. * not use this file except in compliance with the License.
  14. * You may obtain a copy of the License at
  15. *
  16. * www.apache.org/licenses/LICENSE-2.0
  17. *
  18. * Unless required by applicable law or agreed to in writing, software
  19. * distributed under the License is distributed on an AS IS BASIS, WITHOUT
  20. * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  21. * See the License for the specific language governing permissions and
  22. * limitations under the License.
  23. */
#if defined ( __ICCARM__ )
  #pragma system_include        /* treat file as system include file for MISRA check */
#elif defined (__clang__)
  #pragma clang system_header   /* treat file as system include file */
#endif

#ifndef ARM_CACHEL1_ARMV7_H
#define ARM_CACHEL1_ARMV7_H

/**
  \ingroup  CMSIS_Core_FunctionInterface
  \defgroup CMSIS_Core_CacheFunctions Cache Functions
  \brief    Functions that configure Instruction and Data cache.
  @{
 */

/* Cache Size ID Register Macros */
/* CCSIDR encodes (associativity - 1) and (number of sets - 1); the set/way
   loops below rely on these macros returning the HIGHEST index, not the count. */
#define CCSIDR_WAYS(x)         (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x)         (((x) & SCB_CCSIDR_NUMSETS_Msk      ) >> SCB_CCSIDR_NUMSETS_Pos      )

#ifndef   __SCB_DCACHE_LINE_SIZE
#define   __SCB_DCACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif

#ifndef   __SCB_ICACHE_LINE_SIZE
#define   __SCB_ICACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
  46. /**
  47. \brief Enable I-Cache
  48. \details Turns on I-Cache
  49. */
  50. __STATIC_FORCEINLINE void SCB_EnableICache (void)
  51. {
  52. #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
  53. if (SCB->CCR & SCB_CCR_IC_Msk) return; /* return if ICache is already enabled */
  54. __DSB();
  55. __ISB();
  56. SCB->ICIALLU = 0UL; /* invalidate I-Cache */
  57. __DSB();
  58. __ISB();
  59. SCB->CCR |= (uint32_t)SCB_CCR_IC_Msk; /* enable I-Cache */
  60. __DSB();
  61. __ISB();
  62. #endif
  63. }
/**
  \brief   Disable I-Cache
  \details Turns off the instruction cache and invalidates its contents.
           The statement order is deliberate: the cache is disabled first
           so that no new lines are allocated while it is being invalidated.
 */
__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
  __DSB();
  __ISB();
  SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk;  /* disable I-Cache */
  SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
  __DSB();                                /* ensure completion of the maintenance operation */
  __ISB();                                /* flush the pipeline: subsequent fetches bypass the cache */
#endif
}
/**
  \brief   Invalidate I-Cache
  \details Invalidates the entire instruction cache (ICIALLU).
           Call after modifying code in memory (e.g. after loading new code)
           so that stale instructions are not executed.
 */
__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
  __DSB();
  __ISB();
  SCB->ICIALLU = 0UL;  /* any write invalidates the whole I-Cache */
  __DSB();             /* ensure completion of the invalidation */
  __ISB();             /* flush the pipeline so new instructions are fetched */
#endif
}
  93. /**
  94. \brief I-Cache Invalidate by address
  95. \details Invalidates I-Cache for the given address.
  96. I-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
  97. I-Cache memory blocks which are part of given address + given size are invalidated.
  98. \param[in] addr address
  99. \param[in] isize size of memory block (in number of bytes)
  100. */
  101. __STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (volatile void *addr, int32_t isize)
  102. {
  103. #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
  104. if ( isize > 0 ) {
  105. int32_t op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
  106. uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;
  107. __DSB();
  108. do {
  109. SCB->ICIMVAU = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
  110. op_addr += __SCB_ICACHE_LINE_SIZE;
  111. op_size -= __SCB_ICACHE_LINE_SIZE;
  112. } while ( op_size > 0 );
  113. __DSB();
  114. __ISB();
  115. }
  116. #endif
  117. }
  118. /**
  119. \brief Enable D-Cache
  120. \details Turns on D-Cache
  121. */
  122. __STATIC_FORCEINLINE void SCB_EnableDCache (void)
  123. {
  124. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  125. uint32_t ccsidr;
  126. uint32_t sets;
  127. uint32_t ways;
  128. if (SCB->CCR & SCB_CCR_DC_Msk) return; /* return if DCache is already enabled */
  129. SCB->CSSELR = 0U; /* select Level 1 data cache */
  130. __DSB();
  131. ccsidr = SCB->CCSIDR;
  132. /* invalidate D-Cache */
  133. sets = (uint32_t)(CCSIDR_SETS(ccsidr));
  134. do {
  135. ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
  136. do {
  137. SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
  138. ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
  139. #if defined ( __CC_ARM )
  140. __schedule_barrier();
  141. #endif
  142. } while (ways-- != 0U);
  143. } while(sets-- != 0U);
  144. __DSB();
  145. SCB->CCR |= (uint32_t)SCB_CCR_DC_Msk; /* enable D-Cache */
  146. __DSB();
  147. __ISB();
  148. #endif
  149. }
/**
  \brief   Disable D-Cache
  \details Turns off the L1 data cache, then cleans & invalidates it by
           set/way so dirty lines reach memory. Contains a deliberate
           workaround for unoptimized builds (see comment below); do not
           reorder any statement in this function.
 */
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  /* The loop counters live in one struct so their single (aligned) cache
     line can be flushed explicitly after the cache is disabled. */
  struct {
    uint32_t ccsidr;   /* CCSIDR snapshot */
    uint32_t sets;     /* current set index */
    uint32_t ways;     /* current way index */
  } locals
#if ((defined(__GNUC__) || defined(__clang__)) && !defined(__OPTIMIZE__))
    /* Align the struct to one cache line so one DCCIMVAC covers it all. */
    __ALIGNED(__SCB_DCACHE_LINE_SIZE)
#endif
  ;

  SCB->CSSELR = 0U;                        /* select Level 1 data cache */
  __DSB();

  SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk;   /* disable D-Cache */
  __DSB();

#if !defined(__OPTIMIZE__)
  /*
   * For the endless loop issue with no optimization builds.
   * More details, see https://github.com/ARM-software/CMSIS_5/issues/620
   *
   * The issue only happens when local variables are in stack. If
   * local variables are saved in general purpose register, then the function
   * is OK.
   *
   * When local variables are in stack, after disabling the cache, flush the
   * local variables cache line for data consistency.
   */
  /* Clean and invalidate the local variable cache. */
#if defined(__ICCARM__)
  /* As we can't align the stack to the cache line size, invalidate each of the variables */
  SCB->DCCIMVAC = (uint32_t)&locals.sets;
  SCB->DCCIMVAC = (uint32_t)&locals.ways;
  SCB->DCCIMVAC = (uint32_t)&locals.ccsidr;
#else
  SCB->DCCIMVAC = (uint32_t)&locals;
#endif
  __DSB();
  __ISB();
#endif

  locals.ccsidr = SCB->CCSIDR;
  /* clean & invalidate D-Cache: write back dirty lines, then discard them */
  locals.sets = (uint32_t)(CCSIDR_SETS(locals.ccsidr));
  do {
    locals.ways = (uint32_t)(CCSIDR_WAYS(locals.ccsidr));
    do {
      SCB->DCCISW = (((locals.sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                     ((locals.ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
#if defined ( __CC_ARM )
      __schedule_barrier();  /* keep Arm Compiler 5 from reordering the writes */
#endif
    } while (locals.ways-- != 0U);
  } while(locals.sets-- != 0U);
  __DSB();
  __ISB();
#endif
}
  211. /**
  212. \brief Invalidate D-Cache
  213. \details Invalidates D-Cache
  214. */
  215. __STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
  216. {
  217. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  218. uint32_t ccsidr;
  219. uint32_t sets;
  220. uint32_t ways;
  221. SCB->CSSELR = 0U; /* select Level 1 data cache */
  222. __DSB();
  223. ccsidr = SCB->CCSIDR;
  224. /* invalidate D-Cache */
  225. sets = (uint32_t)(CCSIDR_SETS(ccsidr));
  226. do {
  227. ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
  228. do {
  229. SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
  230. ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
  231. #if defined ( __CC_ARM )
  232. __schedule_barrier();
  233. #endif
  234. } while (ways-- != 0U);
  235. } while(sets-- != 0U);
  236. __DSB();
  237. __ISB();
  238. #endif
  239. }
  240. /**
  241. \brief Clean D-Cache
  242. \details Cleans D-Cache
  243. */
  244. __STATIC_FORCEINLINE void SCB_CleanDCache (void)
  245. {
  246. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  247. uint32_t ccsidr;
  248. uint32_t sets;
  249. uint32_t ways;
  250. SCB->CSSELR = 0U; /* select Level 1 data cache */
  251. __DSB();
  252. ccsidr = SCB->CCSIDR;
  253. /* clean D-Cache */
  254. sets = (uint32_t)(CCSIDR_SETS(ccsidr));
  255. do {
  256. ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
  257. do {
  258. SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
  259. ((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk) );
  260. #if defined ( __CC_ARM )
  261. __schedule_barrier();
  262. #endif
  263. } while (ways-- != 0U);
  264. } while(sets-- != 0U);
  265. __DSB();
  266. __ISB();
  267. #endif
  268. }
  269. /**
  270. \brief Clean & Invalidate D-Cache
  271. \details Cleans and Invalidates D-Cache
  272. */
  273. __STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
  274. {
  275. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  276. uint32_t ccsidr;
  277. uint32_t sets;
  278. uint32_t ways;
  279. SCB->CSSELR = 0U; /* select Level 1 data cache */
  280. __DSB();
  281. ccsidr = SCB->CCSIDR;
  282. /* clean & invalidate D-Cache */
  283. sets = (uint32_t)(CCSIDR_SETS(ccsidr));
  284. do {
  285. ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
  286. do {
  287. SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
  288. ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk) );
  289. #if defined ( __CC_ARM )
  290. __schedule_barrier();
  291. #endif
  292. } while (ways-- != 0U);
  293. } while(sets-- != 0U);
  294. __DSB();
  295. __ISB();
  296. #endif
  297. }
  298. /**
  299. \brief D-Cache Invalidate by address
  300. \details Invalidates D-Cache for the given address.
  301. D-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
  302. D-Cache memory blocks which are part of given address + given size are invalidated.
  303. \param[in] addr address
  304. \param[in] dsize size of memory block (in number of bytes)
  305. */
  306. __STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
  307. {
  308. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  309. if ( dsize > 0 ) {
  310. int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
  311. uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
  312. __DSB();
  313. do {
  314. SCB->DCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
  315. op_addr += __SCB_DCACHE_LINE_SIZE;
  316. op_size -= __SCB_DCACHE_LINE_SIZE;
  317. } while ( op_size > 0 );
  318. __DSB();
  319. __ISB();
  320. }
  321. #endif
  322. }
  323. /**
  324. \brief D-Cache Clean by address
  325. \details Cleans D-Cache for the given address
  326. D-Cache is cleaned starting from a 32 byte aligned address in 32 byte granularity.
  327. D-Cache memory blocks which are part of given address + given size are cleaned.
  328. \param[in] addr address
  329. \param[in] dsize size of memory block (in number of bytes)
  330. */
  331. __STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (volatile void *addr, int32_t dsize)
  332. {
  333. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  334. if ( dsize > 0 ) {
  335. int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
  336. uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
  337. __DSB();
  338. do {
  339. SCB->DCCMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
  340. op_addr += __SCB_DCACHE_LINE_SIZE;
  341. op_size -= __SCB_DCACHE_LINE_SIZE;
  342. } while ( op_size > 0 );
  343. __DSB();
  344. __ISB();
  345. }
  346. #endif
  347. }
  348. /**
  349. \brief D-Cache Clean and Invalidate by address
  350. \details Cleans and invalidates D_Cache for the given address
  351. D-Cache is cleaned and invalidated starting from a 32 byte aligned address in 32 byte granularity.
  352. D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
  353. \param[in] addr address (aligned to 32-byte boundary)
  354. \param[in] dsize size of memory block (in number of bytes)
  355. */
  356. __STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
  357. {
  358. #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  359. if ( dsize > 0 ) {
  360. int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
  361. uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
  362. __DSB();
  363. do {
  364. SCB->DCCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
  365. op_addr += __SCB_DCACHE_LINE_SIZE;
  366. op_size -= __SCB_DCACHE_LINE_SIZE;
  367. } while ( op_size > 0 );
  368. __DSB();
  369. __ISB();
  370. }
  371. #endif
  372. }
  373. /*@} end of CMSIS_Core_CacheFunctions */
  374. #endif /* ARM_CACHEL1_ARMV7_H */