cache_utils.c

/*
 * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include <stdio.h>
#include <freertos/FreeRTOS.h>
#include <freertos/task.h>
#include <freertos/semphr.h>
#if CONFIG_IDF_TARGET_ESP32
#include "soc/dport_reg.h"
#include <esp32/rom/spi_flash.h>
#include <esp32/rom/cache.h>
#elif CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rom/spi_flash.h"
#include "esp32s2/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/spi_flash.h"
#include "esp32s3/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/spi_flash.h"
#include "esp32c3/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP32H2
#include "esp32h2/rom/spi_flash.h"
#include "esp32h2/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP8684
#include "esp8684/rom/spi_flash.h"
#include "esp8684/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#endif
#include <soc/soc.h>
#include "sdkconfig.h"
#ifndef CONFIG_FREERTOS_UNICORE
#include "esp_ipc.h"
#endif
#include "esp_attr.h"
#include "esp_intr_alloc.h"
#include "esp_spi_flash.h"
#include "esp_log.h"
static __attribute__((unused)) const char *TAG = "cache";

#define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid

#define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )

#define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))

#define DPORT_CACHE_GET_VAL(cpuid) (cpuid == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP)
#define DPORT_CACHE_GET_MASK(cpuid) (cpuid == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP)
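
/*
 * For reference (comment added for clarity): DPORT_CACHE_GET_MASK(0) selects
 * DPORT_CACHE_MASK(PRO), whose token pasting expands to
 *   DPORT_PRO_CACHE_MASK_OPSDRAM | DPORT_PRO_CACHE_MASK_DROM0 | DPORT_PRO_CACHE_MASK_DRAM1 |
 *   DPORT_PRO_CACHE_MASK_IROM0 | DPORT_PRO_CACHE_MASK_IRAM1 | DPORT_PRO_CACHE_MASK_IRAM0,
 * i.e. all cache "mask" bits of DPORT_PRO_CACHE_CTRL1_REG. DPORT_CACHE_GET_VAL(0) is the
 * matching default value (the DROM0, DRAM1 and IRAM0 mask bits cleared, the remaining
 * mask bits set), which spi_flash_enable_cache() below uses as the restore value.
 */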
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);

static uint32_t s_flash_op_cache_state[2];

#ifndef CONFIG_FREERTOS_UNICORE
static SemaphoreHandle_t s_flash_op_mutex;
static volatile bool s_flash_op_can_start = false;
static volatile bool s_flash_op_complete = false;
#ifndef NDEBUG
static volatile int s_flash_op_cpu = -1;
#endif
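
// Sanity check used before disabling the caches: with cache off, cache-mapped external
// memory (e.g. a PSRAM-backed stack) cannot be accessed, so the current stack pointer
// must point into internal DRAM, or into RTC fast memory when it is enabled as heap.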
static inline bool esp_task_stack_is_sane_cache_disabled(void)
{
    const void *sp = (const void *)esp_cpu_get_sp();

    return esp_ptr_in_dram(sp)
#if CONFIG_ESP_SYSTEM_ALLOW_RTC_FAST_MEM_AS_HEAP
           || esp_ptr_in_rtc_dram_fast(sp)
#endif
           ;
}
void spi_flash_init_lock(void)
{
    s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
    assert(s_flash_op_mutex != NULL);
}

void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}

void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}
/*
 If you're going to modify this, keep in mind that while the flash caches of the PRO and APP
 CPUs are separate, the PSRAM cache is *not*. If one of the CPUs returns from a flash routine
 with its cache enabled but the other CPU's cache is not enabled yet, you will have problems
 when accessing PSRAM from the former CPU.
*/
void IRAM_ATTR spi_flash_op_block_func(void *arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // The s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before the IPC task has a chance to check it
    // (if it is preempted by an ISR taking a non-trivial amount of time)
    s_flash_op_complete = false;
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish the flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Restore interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    assert(esp_task_stack_is_sane_cache_disabled());

    spi_flash_op_lock();

    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // For a sanity check later: record which CPU started the flash operation
    assert(s_flash_op_cpu == -1);
    s_flash_op_cpu = cpuid;
#endif

    if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
        // Scheduler hasn't been started yet, which means the spi_flash API is being
        // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from the
        // PRO CPU. The APP CPU is either in reset or spinning inside user_start_cpu1,
        // which is in IRAM. So it is safe to disable cache for the other_cpuid after
        // esp_intr_noniram_disable.
        assert(other_cpuid == 1);
    } else {
        // Temporarily raise the current task priority to prevent a deadlock while
        // waiting for the IPC task to start on the other CPU
        int old_prio = uxTaskPriorityGet(NULL);
        vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
        // Signal to spi_flash_op_block_func (running in the IPC task) on the other CPU
        // that we need it to disable cache there and block other tasks from executing.
        s_flash_op_can_start = false;
        ESP_ERROR_CHECK(esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void *) other_cpuid));
        while (!s_flash_op_can_start) {
            // Busy loop and wait for spi_flash_op_block_func to disable cache
            // on the other CPU
        }
        // Disable scheduler on the current CPU
        vTaskSuspendAll();
        // Can now set the priority back to the normal one
        vTaskPrioritySet(NULL, old_prio);
        // This is guaranteed to run on CPU <cpuid> because the other CPU is now
        // occupied by the highest priority task
        assert(xPortGetCoreID() == cpuid);
    }
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // This CPU executes this routine with non-IRAM interrupts and the scheduler
    // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
    // with non-IRAM interrupts and the scheduler disabled. Neither CPU will
    // touch external RAM or flash this way, so we can safely disable the caches.
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
}
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: the flash operation ends on the same CPU where it started
    assert(cpuid == s_flash_op_cpu);
    // Another sanity check: if the scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif

    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);

    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_func that the flash operation is complete
        s_flash_op_complete = true;
    }

    // Re-enable non-IRAM interrupts
    esp_intr_noniram_enable();

    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable.
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }

    // Release the API lock
    spi_flash_op_unlock();
}
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;

    // do not care about other CPU, it was halted upon entering panic handler
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable non-IRAM interrupts
    esp_intr_noniram_enable();
}
#else // CONFIG_FREERTOS_UNICORE

void spi_flash_init_lock(void)
{
}

void spi_flash_op_lock(void)
{
    vTaskSuspendAll();
}

void spi_flash_op_unlock(void)
{
    xTaskResumeAll();
}

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-IRAM interrupts
    esp_intr_noniram_enable();
}

#endif // CONFIG_FREERTOS_UNICORE
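
/*
 * Illustrative usage sketch (not part of the original file): a raw flash operation is
 * expected to be bracketed by the guard pair above so that neither CPU touches flash- or
 * PSRAM-mapped memory while the SPI bus is busy. The helper name and its body are
 * hypothetical and only show the intended call order.
 */
#if 0 /* example only, never compiled */
static IRAM_ATTR void example_raw_flash_op(void)
{
    spi_flash_disable_interrupts_caches_and_other_cpu(); // scheduler, non-IRAM interrupts and caches off on both CPUs
    // ... perform the SPI transaction here, using IRAM-resident code and DRAM buffers only ...
    spi_flash_enable_interrupts_caches_and_other_cpu();  // caches, interrupts and scheduler restored
}
#endif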
/**
 * The following two functions are replacements for the Cache_Read_Disable and Cache_Read_Enable
 * functions in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
 * Cache_Flush before Cache_Read_Enable, even if the cached data was not modified.
 */
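/*
 * Layout of the opaque saved_state word differs per target (derived from the code below):
 * on ESP32 it holds the DPORT_*_CACHE_CTRL1_REG bits selected by the cache mask; on
 * ESP32-S2 it is the ICache autoload/suspend state; on ESP32-S3 the upper 16 bits hold the
 * ICache state and the lower 16 bits the DCache state; on ESP32-C3/H2/8684 only the upper
 * 16 bits (ICache) are used.
 */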
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
    *saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
    uint32_t icache_state, dcache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    dcache_state = Cache_Suspend_DCache();
    *saved_state = icache_state | dcache_state;
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
    uint32_t icache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    *saved_state = icache_state;
#endif
}
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
    Cache_Resume_DCache(saved_state & 0xffff);
    Cache_Resume_ICache(saved_state >> 16);
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
    Cache_Resume_ICache(saved_state >> 16);
#endif
}
IRAM_ATTR bool spi_flash_cache_enabled(void)
{
#if CONFIG_IDF_TARGET_ESP32
    bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
#if portNUM_PROCESSORS == 2
    result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
#elif CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
    bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
#endif
    return result;
}
#if CONFIG_IDF_TARGET_ESP32S2
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Suspend_ICache();
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    Cache_Resume_ICache(0);
}
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
#else
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
    cache_size = CACHE_SIZE_16KB;
#endif
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}

#if CONFIG_ESP32S2_SPIRAM_SUPPORT
static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_SRAM_RD_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}
#endif
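
/*
 * Note on the bookkeeping below (comment added for clarity): flash_wrap_sizes[] and
 * spiram_wrap_sizes[] have one slot per cache path, index 0 for the instruction-cache
 * (IROM / DROM-in-ICache) path and index 1 for the data-cache (DROM) path. A slot stays
 * -1 when that path does not go through the given memory, so flash_count and spiram_count
 * end up counting how many cache paths hit flash and SPIRAM respectively.
 */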
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 1; // always 1 on ESP32-S2
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
    drom0_in_icache = 0;
#endif

    if (icache_wrap_enable) {
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32H2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP8684_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#else
        icache_wrap_size = 32;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B || CONFIG_ESP32S3_DATA_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32H2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP8684_INSTRUCTION_CACHE_LINE_16B
        dcache_wrap_size = 16;
#else
        dcache_wrap_size = 32;
#endif
    }

    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif

    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    ESP_EARLY_LOGI(TAG, "flash_count=%d, size=%d, spiram_count=%d, size=%d, together=%d", flash_count, flash_wrap_size, spiram_count, spiram_wrap_size, flash_spiram_wrap_together);

    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different lengths %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length is not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different lengths %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length is not fixed.");
            return ESP_FAIL;
        }
    }
    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap length %d differs from flash wrap length %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash does not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, wrap is not supported.");
#endif

#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM does not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM must both support wrap to enable it together.");
        return ESP_FAIL;
    }

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32S2
#if CONFIG_IDF_TARGET_ESP32S3
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_IBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 16 : 32, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    extern void Cache_Enable_ICache(uint32_t autoload);
    Cache_Enable_ICache(0);
}

IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_DATA_CACHE_32KB
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK1, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK0, CACHE_MEMORY_DBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    // ESP_EARLY_LOGI(TAG, "Data cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 32 : 64, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}

#if CONFIG_ESP32S3_SPIRAM_SUPPORT
static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_SRAM_RD_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}
#endif
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = false, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 0; // always 0 on ESP32-S3 (chip7.2.4)

    if (icache_wrap_enable) {
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
        icache_wrap_size = 32;
#else
        icache_wrap_size = 64;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
        dcache_wrap_size = 32;
#else
        dcache_wrap_size = 64;
#endif
    }

    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif

    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }

    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different lengths %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length is not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different lengths %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length is not fixed.");
            return ESP_FAIL;
        }
    }
    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap length %d differs from flash wrap length %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash does not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, wrap is not supported.");
#endif

#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM does not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM must both support wrap to enable it together.");
        return ESP_FAIL;
    }

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32S3
#if CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache)
{
    uint32_t i_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
}

esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable)
{
    int flash_wrap_size = 0;
    bool flash_support_wrap = false;

    if (icache_wrap_enable) {
        flash_wrap_size = 32;
    }
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash does not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, wrap is not supported.");
#endif // CONFIG_FLASHMODE_QIO

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_size > 0));
    }
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP8684
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
    cache_value &= DPORT_CACHE_GET_MASK(cpuid);

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, cache_value);
#else
    spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}