// cache_ll.h — low-level (LL) cache register operations for ESP32-S2
  1. /*
  2. * SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. // The LL layer for Cache register operations
  7. #pragma once
  8. #include <stdbool.h>
  9. #include "soc/extmem_reg.h"
  10. #include "soc/ext_mem_defs.h"
  11. #include "hal/cache_types.h"
  12. #include "hal/assert.h"
  13. #include "esp32s2/rom/cache.h"
  14. #ifdef __cplusplus
  15. extern "C" {
  16. #endif
  17. #define CACHE_LL_DEFAULT_IBUS_MASK CACHE_BUS_IBUS0
  18. #define CACHE_LL_DEFAULT_DBUS_MASK CACHE_BUS_IBUS2
  19. #define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
  20. #define CACHE_LL_L1_DCACHE_AUTOLOAD (1<<0)
  21. /**
  22. * @brief Check if ICache auto preload is enabled or not
  23. *
  24. * @return true: enabled; false: disabled
  25. */
  26. __attribute__((always_inline))
  27. static inline bool cache_ll_l1_is_icache_autoload_enabled(void)
  28. {
  29. bool enabled = false;
  30. if (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_AUTOLOAD_ENA)) {
  31. enabled = true;
  32. }
  33. return enabled;
  34. }
  35. /**
  36. * @brief Check if DCache auto preload is enabled or not
  37. *
  38. * @return true: enabled; false: disabled
  39. */
  40. __attribute__((always_inline))
  41. static inline bool cache_ll_l1_is_dcache_autoload_enabled(void)
  42. {
  43. bool enabled = false;
  44. if (REG_GET_BIT(EXTMEM_PRO_DCACHE_CTRL_REG, EXTMEM_PRO_DCACHE_AUTOLOAD_ENA)) {
  45. enabled = true;
  46. }
  47. return enabled;
  48. }
  49. /**
  50. * @brief Check if ICache or DCache auto preload is enabled or not
  51. *
  52. * @param type see `cache_type_t`
  53. *
  54. * @return true: enabled; false: disabled
  55. */
  56. __attribute__((always_inline))
  57. static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
  58. {
  59. bool enabled = false;
  60. switch (type)
  61. {
  62. case CACHE_TYPE_INSTRUCTION:
  63. enabled = cache_ll_l1_is_icache_autoload_enabled();
  64. break;
  65. case CACHE_TYPE_DATA:
  66. enabled = cache_ll_l1_is_dcache_autoload_enabled();
  67. break;
  68. default: //CACHE_TYPE_ALL
  69. enabled = cache_ll_l1_is_icache_autoload_enabled() && cache_ll_l1_is_dcache_autoload_enabled();
  70. break;
  71. }
  72. return enabled;
  73. }
  74. /**
  75. * @brief Disable ICache
  76. */
  77. __attribute__((always_inline))
  78. static inline void cache_ll_l1_disable_icache(void)
  79. {
  80. Cache_Disable_ICache();
  81. }
  82. /**
  83. * @brief Disable DCache
  84. */
  85. __attribute__((always_inline))
  86. static inline void cache_ll_l1_disable_dcache(void)
  87. {
  88. Cache_Disable_DCache();
  89. }
  90. /**
  91. * @brief Disable ICache or DCache or both
  92. *
  93. * @param type see `cache_type_t`
  94. */
  95. __attribute__((always_inline))
  96. static inline void cache_ll_disable_cache(cache_type_t type)
  97. {
  98. switch (type)
  99. {
  100. case CACHE_TYPE_INSTRUCTION:
  101. cache_ll_l1_disable_icache();
  102. break;
  103. case CACHE_TYPE_DATA:
  104. cache_ll_l1_disable_dcache();
  105. break;
  106. default: //CACHE_TYPE_ALL
  107. cache_ll_l1_disable_icache();
  108. cache_ll_l1_disable_dcache();
  109. break;
  110. }
  111. }
  112. /**
  113. * @brief Enable ICache
  114. *
  115. * @param inst_autoload_en ICache auto preload enabled
  116. */
  117. __attribute__((always_inline))
  118. static inline void cache_ll_l1_enable_icache(bool inst_autoload_en)
  119. {
  120. Cache_Enable_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
  121. }
  122. /**
  123. * @brief Enable DCache
  124. *
  125. * @param data_autoload_en DCache auto preload enabled
  126. */
  127. __attribute__((always_inline))
  128. static inline void cache_ll_l1_enable_dcache(bool data_autoload_en)
  129. {
  130. Cache_Enable_DCache(data_autoload_en ? CACHE_LL_L1_DCACHE_AUTOLOAD : 0);
  131. }
  132. /**
  133. * @brief Enable ICache or DCache or both
  134. *
  135. * @param type see `cache_type_t`
  136. *
  137. * @param data_autoload_en Dcache auto preload enabled
  138. *
  139. * @param inst_autoload_en Icache auto preload enabled
  140. */
  141. __attribute__((always_inline))
  142. static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
  143. {
  144. switch (type)
  145. {
  146. case CACHE_TYPE_INSTRUCTION:
  147. cache_ll_l1_enable_icache(inst_autoload_en);
  148. break;
  149. case CACHE_TYPE_DATA:
  150. cache_ll_l1_enable_dcache(data_autoload_en);
  151. break;
  152. default: //CACHE_TYPE_ALL
  153. cache_ll_l1_enable_icache(inst_autoload_en);
  154. cache_ll_l1_enable_dcache(data_autoload_en);
  155. break;
  156. }
  157. }
  158. /**
  159. * @brief Suspend ICache
  160. */
  161. __attribute__((always_inline))
  162. static inline void cache_ll_l1_suspend_icache(void)
  163. {
  164. Cache_Suspend_ICache();
  165. }
  166. /**
  167. * @brief Suspend DCache
  168. */
  169. __attribute__((always_inline))
  170. static inline void cache_ll_l1_suspend_dcache(void)
  171. {
  172. Cache_Suspend_DCache();
  173. }
  174. /**
  175. * @brief Suspend ICache or DCache or both
  176. *
  177. * @param type see `cache_type_t`
  178. */
  179. __attribute__((always_inline))
  180. static inline void cache_ll_suspend_cache(cache_type_t type)
  181. {
  182. switch (type)
  183. {
  184. case CACHE_TYPE_INSTRUCTION:
  185. cache_ll_l1_suspend_icache();
  186. break;
  187. case CACHE_TYPE_DATA:
  188. cache_ll_l1_suspend_dcache();
  189. break;
  190. default: //CACHE_TYPE_ALL
  191. cache_ll_l1_suspend_icache();
  192. cache_ll_l1_suspend_dcache();
  193. break;
  194. }
  195. }
  196. /**
  197. * @brief Resume ICache
  198. *
  199. * @param inst_autoload_en ICache auto preload enabled
  200. */
  201. __attribute__((always_inline))
  202. static inline void cache_ll_l1_resume_icache(bool inst_autoload_en)
  203. {
  204. Cache_Resume_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
  205. }
  206. /**
  207. * @brief Resume DCache
  208. *
  209. * @param data_autoload_en DCache auto preload enabled
  210. */
  211. __attribute__((always_inline))
  212. static inline void cache_ll_l1_resume_dcache(bool data_autoload_en)
  213. {
  214. Cache_Resume_DCache(data_autoload_en ? CACHE_LL_L1_DCACHE_AUTOLOAD : 0);
  215. }
  216. /**
  217. * @brief Resume ICache or DCache or both
  218. *
  219. * @param type see `cache_type_t`
  220. *
  221. * @param data_autoload_en Dcache auto preload enabled
  222. *
  223. * @param inst_autoload_en Icache auto preload enabled
  224. */
  225. __attribute__((always_inline))
  226. static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
  227. {
  228. switch (type)
  229. {
  230. case CACHE_TYPE_INSTRUCTION:
  231. cache_ll_l1_resume_icache(inst_autoload_en);
  232. break;
  233. case CACHE_TYPE_DATA:
  234. cache_ll_l1_resume_dcache(data_autoload_en);
  235. break;
  236. default: //CACHE_TYPE_ALL
  237. cache_ll_l1_resume_icache(inst_autoload_en);
  238. cache_ll_l1_resume_dcache(data_autoload_en);
  239. break;
  240. }
  241. }
  242. /**
  243. * @brief Check if ICache is enabled or not
  244. *
  245. * @param cache_id cache ID (when l1 cache is per core)
  246. *
  247. * @return true: enabled; false: disabled
  248. */
  249. __attribute__((always_inline))
  250. static inline bool cache_ll_l1_is_icache_enabled(uint32_t cache_id){
  251. HAL_ASSERT(cache_id == 0);
  252. bool enabled;
  253. enabled = REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE);
  254. return enabled;
  255. }
  256. /**
  257. * @brief Check if DCache is enabled or not
  258. *
  259. * @param cache_id cache ID (when l1 cache is per core)
  260. *
  261. * @return true: enabled; false: disabled
  262. */
  263. __attribute__((always_inline))
  264. static inline bool cache_ll_l1_is_dcache_enabled(uint32_t cache_id)
  265. {
  266. HAL_ASSERT(cache_id == 0);
  267. bool enabled;
  268. enabled = REG_GET_BIT(EXTMEM_PRO_DCACHE_CTRL_REG, EXTMEM_PRO_DCACHE_ENABLE);
  269. return enabled;
  270. }
  271. /**
  272. * @brief Check if ICache or DCache or both is enabled or not
  273. *
  274. * @param type see `cache_type_t`
  275. *
  276. * @return true: enabled; false: disabled
  277. */
  278. __attribute__((always_inline))
  279. static inline bool cache_ll_is_cache_enabled(cache_type_t type)
  280. {
  281. bool enabled = false;
  282. switch (type)
  283. {
  284. case CACHE_TYPE_DATA:
  285. enabled = cache_ll_l1_is_dcache_enabled(0);
  286. break;
  287. case CACHE_TYPE_INSTRUCTION:
  288. enabled = cache_ll_l1_is_icache_enabled(0);
  289. break;
  290. default: //CACHE_TYPE_ALL
  291. enabled = cache_ll_l1_is_dcache_enabled(0) && cache_ll_l1_is_icache_enabled(0);
  292. break;
  293. }
  294. return enabled;
  295. }
  296. /**
  297. * @brief Invalidate cache supported addr
  298. *
  299. * Invalidate a Cache item for either ICache or DCache.
  300. *
  301. * @param vaddr Start address of the region to be invalidated
  302. * @param size Size of the region to be invalidated
  303. */
  304. __attribute__((always_inline))
  305. static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
  306. {
  307. Cache_Invalidate_Addr(vaddr, size);
  308. }
  309. /**
  310. * @brief Writeback cache supported addr
  311. *
  312. * Writeback the DCache item to external memory
  313. *
  314. * @param vaddr Start address of the region to writeback
  315. * @param size Size of the region to writeback
  316. */
  317. __attribute__((always_inline))
  318. static inline void cache_ll_writeback_addr(uint32_t vaddr, uint32_t size)
  319. {
  320. Cache_WriteBack_Addr(vaddr, size);
  321. }
  322. /**
  323. * @brief Get ICache line size, in bytes
  324. *
  325. * @return ICache line size, in bytes
  326. */
  327. __attribute__((always_inline))
  328. static inline uint32_t cache_ll_l1_icache_get_line_size(void)
  329. {
  330. uint32_t size = 0;
  331. size = Cache_Get_ICache_Line_Size();
  332. return size;
  333. }
  334. /**
  335. * @brief Get DCache line size, in bytes
  336. *
  337. * @return DCache line size, in bytes
  338. */
  339. __attribute__((always_inline))
  340. static inline uint32_t cache_ll_l1_dcache_get_line_size(void)
  341. {
  342. uint32_t size = 0;
  343. size = Cache_Get_DCache_Line_Size();
  344. return size;
  345. }
  346. /**
  347. * @brief Get ICache or DCache line size, in bytes
  348. *
  349. * @param type see `cache_type_t`
  350. *
  351. * @return ICache/DCache line size, in bytes
  352. */
  353. __attribute__((always_inline))
  354. static inline uint32_t cache_ll_get_line_size(cache_type_t type)
  355. {
  356. uint32_t size = 0;
  357. switch (type)
  358. {
  359. case CACHE_TYPE_INSTRUCTION:
  360. size = cache_ll_l1_icache_get_line_size();
  361. break;
  362. case CACHE_TYPE_DATA:
  363. size = cache_ll_l1_dcache_get_line_size();
  364. break;
  365. default: //CACHE_TYPE_ALL
  366. HAL_ASSERT(false);
  367. break;
  368. }
  369. return size;
  370. }
/**
 * @brief Get the buses of a particular cache that are mapped to a virtual address range
 *
 * External virtual address can only be accessed when the involved cache buses are enabled.
 * This API is to get the cache buses where the memory region (from `vaddr_start` to `vaddr_start + len`) reside.
 *
 * @param cache_id cache ID (when l1 cache is per core)
 * @param vaddr_start virtual address start
 * @param len vaddr length
 *
 * @return mask of all cache buses the region touches; aborts on an address below every known window
 */
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
    (void)cache_id;
    cache_bus_mask_t mask = 0;
    // Last byte of the region. The chain below tests windows from highest base
    // address downward: the first branch whose base is <= vaddr_start locates
    // the window holding the region's start, and the extra `vaddr_end` checks
    // add every higher window the region spills into. The branch order is
    // therefore significant and must match the descending address layout:
    // IRAM1 > IRAM0 > DRAM0 > DRAM1 > DPORT > DROM0.
    // NOTE(review): len is assumed > 0 and vaddr_end is not checked against the
    // top of the highest window — presumably validated by callers; confirm.
    uint32_t vaddr_end = vaddr_start + len - 1;
    if (vaddr_start >= IRAM1_ADDRESS_LOW) {
        // Region starts in IRAM1 (highest window) — only IBUS1 involved
        mask |= CACHE_BUS_IBUS1;
    } else if (vaddr_start >= IRAM0_CACHE_ADDRESS_LOW) {
        // Starts in IRAM0; may extend upward into IRAM1
        mask |= CACHE_BUS_IBUS0;
        mask |= (vaddr_end >= IRAM1_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
    } else if (vaddr_start >= DRAM0_CACHE_ADDRESS_LOW) {
        // Starts in DRAM0; may extend upward into IRAM0/IRAM1
        mask |= CACHE_BUS_DBUS0;
        mask |= (vaddr_end >= IRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS0 : 0;
        mask |= (vaddr_end >= IRAM1_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
    } else if (vaddr_start >= DRAM1_ADDRESS_LOW) {
        // Starts in DRAM1; may extend upward into DRAM0/IRAM0/IRAM1
        mask |= CACHE_BUS_DBUS1;
        mask |= (vaddr_end >= DRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_DBUS0 : 0;
        mask |= (vaddr_end >= IRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS0 : 0;
        mask |= (vaddr_end >= IRAM1_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
    } else if (vaddr_start >= DPORT_CACHE_ADDRESS_LOW) {
        // Starts in DPORT; may extend upward into DRAM1/DRAM0/IRAM0/IRAM1
        mask |= CACHE_BUS_DBUS2;
        mask |= (vaddr_end >= DRAM1_ADDRESS_LOW) ? CACHE_BUS_DBUS1 : 0;
        mask |= (vaddr_end >= DRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_DBUS0 : 0;
        mask |= (vaddr_end >= IRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS0 : 0;
        mask |= (vaddr_end >= IRAM1_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
    } else if (vaddr_start >= DROM0_ADDRESS_LOW) {
        // Starts in DROM0 (lowest window, served by IBUS2); may span all others
        mask |= CACHE_BUS_IBUS2;
        mask |= (vaddr_end >= DPORT_CACHE_ADDRESS_LOW) ? CACHE_BUS_DBUS2 : 0;
        mask |= (vaddr_end >= DRAM1_ADDRESS_LOW) ? CACHE_BUS_DBUS1 : 0;
        mask |= (vaddr_end >= DRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_DBUS0 : 0;
        mask |= (vaddr_end >= IRAM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS0 : 0;
        mask |= (vaddr_end >= IRAM1_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
    } else {
        // Address below every cacheable window: programming error
        abort();
    }
    return mask;
}
  421. /**
  422. * Enable the Cache Buses
  423. *
  424. * @param cache_id cache ID (when l1 cache is per core)
  425. * @param mask To know which buses should be enabled
  426. */
  427. #if !BOOTLOADER_BUILD
  428. __attribute__((always_inline))
  429. #endif
  430. static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
  431. {
  432. (void)cache_id;
  433. uint32_t ibus_mask = 0;
  434. ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_PRO_ICACHE_MASK_IRAM0 : 0;
  435. ibus_mask |= (mask & CACHE_BUS_IBUS1) ? EXTMEM_PRO_ICACHE_MASK_IRAM1 : 0;
  436. ibus_mask |= (mask & CACHE_BUS_IBUS2) ? EXTMEM_PRO_ICACHE_MASK_DROM0 : 0;
  437. REG_CLR_BIT(EXTMEM_PRO_ICACHE_CTRL1_REG, ibus_mask);
  438. uint32_t dbus_mask = 0;
  439. dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_PRO_DCACHE_MASK_DRAM0 : 0;
  440. dbus_mask |= (mask & CACHE_BUS_DBUS1) ? EXTMEM_PRO_DCACHE_MASK_DRAM1 : 0;
  441. dbus_mask |= (mask & CACHE_BUS_DBUS2) ? EXTMEM_PRO_DCACHE_MASK_DPORT : 0;
  442. REG_CLR_BIT(EXTMEM_PRO_DCACHE_CTRL1_REG, dbus_mask);
  443. }
  444. /**
  445. * Disable the Cache Buses
  446. *
  447. * @param cache_id cache ID (when l1 cache is per core)
  448. * @param mask To know which buses should be disabled
  449. */
  450. __attribute__((always_inline))
  451. static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
  452. {
  453. (void)cache_id;
  454. uint32_t ibus_mask = 0;
  455. ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_PRO_ICACHE_MASK_IRAM0 : 0;
  456. ibus_mask |= (mask & CACHE_BUS_IBUS1) ? EXTMEM_PRO_ICACHE_MASK_IRAM1 : 0;
  457. ibus_mask |= (mask & CACHE_BUS_IBUS2) ? EXTMEM_PRO_ICACHE_MASK_DROM0 : 0;
  458. REG_SET_BIT(EXTMEM_PRO_ICACHE_CTRL1_REG, ibus_mask);
  459. uint32_t dbus_mask = 0;
  460. dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_PRO_DCACHE_MASK_DRAM0 : 0;
  461. dbus_mask |= (mask & CACHE_BUS_DBUS1) ? EXTMEM_PRO_DCACHE_MASK_DRAM1 : 0;
  462. dbus_mask |= (mask & CACHE_BUS_DBUS2) ? EXTMEM_PRO_DCACHE_MASK_DPORT : 0;
  463. REG_SET_BIT(EXTMEM_PRO_DCACHE_CTRL1_REG, dbus_mask);
  464. }
  465. #ifdef __cplusplus
  466. }
  467. #endif