cache_ll.h

/*
 * SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

// The LL layer for Cache register operations

#pragma once

#include "soc/extmem_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
#include "hal/assert.h"

#ifdef __cplusplus
extern "C" {
#endif

#define CACHE_LL_DEFAULT_IBUS_MASK                CACHE_BUS_IBUS0
#define CACHE_LL_DEFAULT_DBUS_MASK                CACHE_BUS_DBUS0

#define CACHE_LL_L1_ACCESS_EVENT_MASK             (0x3f)
#define CACHE_LL_L1_ACCESS_EVENT_DBUS_WR_IC       (1<<5)
#define CACHE_LL_L1_ACCESS_EVENT_DBUS_REJECT      (1<<4)
#define CACHE_LL_L1_ACCESS_EVENT_DBUS_ACS_MSK_IC  (1<<3)
#define CACHE_LL_L1_ACCESS_EVENT_IBUS_REJECT      (1<<2)
#define CACHE_LL_L1_ACCESS_EVENT_IBUS_WR_IC       (1<<1)
#define CACHE_LL_L1_ACCESS_EVENT_IBUS_ACS_MSK_IC  (1<<0)

#define CACHE_LL_L1_ILG_EVENT_MASK                (0x23)
#define CACHE_LL_L1_ILG_EVENT_MMU_ENTRY_FAULT     (1<<5)
#define CACHE_LL_L1_ILG_EVENT_PRELOAD_OP_FAULT    (1<<1)
#define CACHE_LL_L1_ILG_EVENT_SYNC_OP_FAULT       (1<<0)
/**
 * @brief Get the buses of a particular cache that are mapped to a virtual address range
 *
 * External virtual addresses can only be accessed when the involved cache buses are enabled.
 * This API gets the cache buses on which the memory region (from `vaddr_start` to `vaddr_start + len`) resides.
 *
 * @param cache_id    cache ID (when l1 cache is per core)
 * @param vaddr_start virtual address start
 * @param len         vaddr length
 */
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
    HAL_ASSERT(cache_id == 0);
    cache_bus_mask_t mask = 0;

    uint32_t vaddr_end = vaddr_start + len;
    if (vaddr_start >= IRAM0_CACHE_ADDRESS_LOW && vaddr_end <= IRAM0_CACHE_ADDRESS_HIGH) {
        mask |= CACHE_BUS_IBUS0;
    } else if (vaddr_start >= DRAM0_CACHE_ADDRESS_LOW && vaddr_end <= DRAM0_CACHE_ADDRESS_HIGH) {
        mask |= CACHE_BUS_DBUS0;
    } else {
        HAL_ASSERT(0);      //Out of region
    }

    return mask;
}
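/*
 * Usage sketch (illustrative addition, not part of the original header):
 * resolve which cache bus backs a mapped region and enable it via
 * cache_ll_l1_enable_bus() defined below. `len` is a hypothetical region
 * size; the MMU mapping for the region is assumed to be set up already.
 *
 *     uint32_t vaddr = IRAM0_CACHE_ADDRESS_LOW;   // start of the IRAM0 cache range
 *     uint32_t len   = 0x10000;                   // hypothetical 64 KB region
 *     cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, vaddr, len);
 *     cache_ll_l1_enable_bus(0, bus_mask);
 */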
/**
 * Enable the Cache Buses
 *
 * @param cache_id cache ID (when l1 cache is per core)
 * @param mask     bus mask, indicating which buses should be enabled
 */
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    //On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

    uint32_t ibus_mask = 0;
    ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_IBUS : 0;
    REG_CLR_BIT(EXTMEM_ICACHE_CTRL1_REG, ibus_mask);

    uint32_t dbus_mask = 0;
    dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_ICACHE_SHUT_DBUS : 0;
    REG_CLR_BIT(EXTMEM_ICACHE_CTRL1_REG, dbus_mask);
}
/**
 * Disable the Cache Buses
 *
 * @param cache_id cache ID (when l1 cache is per core)
 * @param mask     bus mask, indicating which buses should be disabled
 */
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    //On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

    uint32_t ibus_mask = 0;
    ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_IBUS : 0;
    REG_SET_BIT(EXTMEM_ICACHE_CTRL1_REG, ibus_mask);

    uint32_t dbus_mask = 0;
    dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_ICACHE_SHUT_DBUS : 0;
    REG_SET_BIT(EXTMEM_ICACHE_CTRL1_REG, dbus_mask);
}
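/*
 * Usage sketch (illustrative addition, not part of the original header):
 * temporarily shut the buses covering a region while its mapping is being
 * changed, then re-enable them. The MMU update step is only hinted at and
 * is an assumption about the caller's context.
 *
 *     cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, vaddr, len);
 *     cache_ll_l1_disable_bus(0, bus_mask);
 *     // ... update the MMU mapping for [vaddr, vaddr + len) ...
 *     cache_ll_l1_enable_bus(0, bus_mask);
 */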
/*------------------------------------------------------------------------------
 * Interrupt
 *----------------------------------------------------------------------------*/
/**
 * @brief Enable Cache access error interrupt
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 */
static inline void cache_ll_l1_enable_access_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_ENA_REG, mask);
}

/**
 * @brief Clear Cache access error interrupt status
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 */
static inline void cache_ll_l1_clear_access_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_CLR_REG, mask);
}

/**
 * @brief Get Cache access error interrupt status
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 *
 * @return Status mask
 */
static inline uint32_t cache_ll_l1_get_access_error_intr_status(uint32_t cache_id, uint32_t mask)
{
    return GET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_ST_REG, mask);
}
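/*
 * Usage sketch (illustrative addition, not part of the original header):
 * enable all access-error events, then, inside a cache error interrupt
 * handler, read and clear the pending status. Registering the handler is
 * assumed to happen elsewhere.
 *
 *     cache_ll_l1_enable_access_error_intr(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
 *
 *     // In the interrupt handler:
 *     uint32_t status = cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
 *     cache_ll_l1_clear_access_error_intr(0, status);
 */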
/**
 * @brief Enable Cache illegal error interrupt
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 */
static inline void cache_ll_l1_enable_illegal_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_CACHE_ILG_INT_ENA_REG, mask);
}

/**
 * @brief Clear Cache illegal error interrupt status
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 */
static inline void cache_ll_l1_clear_illegal_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_CACHE_ILG_INT_CLR_REG, mask);
}

/**
 * @brief Get Cache illegal error interrupt status
 *
 * @param cache_id Cache ID, not used on C2, kept for compatibility
 * @param mask     Interrupt mask
 *
 * @return Status mask
 */
static inline uint32_t cache_ll_l1_get_illegal_error_intr_status(uint32_t cache_id, uint32_t mask)
{
    return GET_PERI_REG_MASK(EXTMEM_CACHE_ILG_INT_ST_REG, mask);
}
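/*
 * Usage sketch (illustrative addition, not part of the original header):
 * the illegal-error group follows the same enable / get-status / clear
 * pattern as the access-error group, using the CACHE_LL_L1_ILG_EVENT_* bits.
 *
 *     cache_ll_l1_enable_illegal_error_intr(0, CACHE_LL_L1_ILG_EVENT_MASK);
 *
 *     // In the interrupt handler:
 *     uint32_t ilg = cache_ll_l1_get_illegal_error_intr_status(0, CACHE_LL_L1_ILG_EVENT_MASK);
 *     cache_ll_l1_clear_illegal_error_intr(0, ilg);
 */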
#ifdef __cplusplus
}
#endif