// cache_utils.c

/*
 * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include <stdio.h>
#include <freertos/FreeRTOS.h>
#include <freertos/task.h>
#include <freertos/semphr.h>
#if CONFIG_IDF_TARGET_ESP32
#include "soc/dport_reg.h"
#include <esp32/rom/spi_flash.h>
#include <esp32/rom/cache.h>
#elif CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rom/spi_flash.h"
#include "esp32s2/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/spi_flash.h"
#include "esp32s3/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/spi_flash.h"
#include "esp32c3/rom/cache.h"
#include "soc/extmem_reg.h"
#include "soc/cache_memory.h"
#endif
#include <soc/soc.h>
#include "sdkconfig.h"
#ifndef CONFIG_FREERTOS_UNICORE
#include "esp_ipc.h"
#endif
#include "esp_attr.h"
#include "esp_intr_alloc.h"
#include "esp_spi_flash.h"
#include "esp_log.h"

static __attribute__((unused)) const char *TAG = "cache";
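
/*
 * Helpers for the ESP32 DPORT cache control registers: DPORT_CACHE_MASK() selects the
 * per-CPU (PRO/APP) cache mapping bits of the *_CACHE_CTRL1 register, and
 * DPORT_CACHE_VAL() is the default value written back when the cache is re-enabled
 * without a previously saved state. They are used by spi_flash_disable_cache(),
 * spi_flash_restore_cache() and spi_flash_enable_cache() below.
 */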
#define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid

#define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )

#define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))

#define DPORT_CACHE_GET_VAL(cpuid) (cpuid == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP)
#define DPORT_CACHE_GET_MASK(cpuid) (cpuid == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP)

static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);

static uint32_t s_flash_op_cache_state[2];

#ifndef CONFIG_FREERTOS_UNICORE
static SemaphoreHandle_t s_flash_op_mutex;
static volatile bool s_flash_op_can_start = false;
static volatile bool s_flash_op_complete = false;
#ifndef NDEBUG
static volatile int s_flash_op_cpu = -1;
#endif
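
// With the flash cache disabled the CPU can only execute from IRAM and access internal
// RAM, so the current task's stack must live in DRAM (or in RTC fast memory when it is
// enabled as heap). This check catches tasks whose stack was placed in external RAM.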
static inline bool esp_task_stack_is_sane_cache_disabled(void)
{
    const void *sp = (const void *)get_sp();

    return esp_ptr_in_dram(sp)
#if CONFIG_ESP_SYSTEM_ALLOW_RTC_FAST_MEM_AS_HEAP
           || esp_ptr_in_rtc_dram_fast(sp)
#endif
           ;
}

void spi_flash_init_lock(void)
{
    s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
    assert(s_flash_op_mutex != NULL);
}

void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}

void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}

/*
 If you're going to modify this, keep in mind that while the flash caches of the pro and app
 CPUs are separate, the psram cache is *not*. If one of the CPUs returns from a flash routine
 with its cache enabled but the other CPU's cache is not enabled yet, you will have problems
 when accessing psram from the former CPU.
*/
void IRAM_ATTR spi_flash_op_block_func(void *arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before IPC task has a chance to check it
    // (if it is preempted by an ISR taking non-trivial amount of time)
    s_flash_op_complete = false;
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Restore interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    assert(esp_task_stack_is_sane_cache_disabled());

    spi_flash_op_lock();

    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // For sanity check later: record the CPU which has started doing flash operation
    assert(s_flash_op_cpu == -1);
    s_flash_op_cpu = cpuid;
#endif

    if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
        // Scheduler hasn't been started yet, it means that spi_flash API is being
        // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from
        // PRO CPU. APP CPU is either in reset or spinning inside user_start_cpu1,
        // which is in IRAM. So it is safe to disable cache for the other_cpuid after
        // esp_intr_noniram_disable.
        assert(other_cpuid == 1);
    } else {
        // Temporarily raise current task priority to prevent a deadlock while
        // waiting for IPC task to start on the other CPU
        int old_prio = uxTaskPriorityGet(NULL);
        vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
        // Signal to the spi_flash_op_block_task on the other CPU that we need it to
        // disable cache there and block other tasks from executing.
        s_flash_op_can_start = false;
        esp_err_t ret = esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void *) other_cpuid);
        assert(ret == ESP_OK);
        while (!s_flash_op_can_start) {
            // Busy loop and wait for spi_flash_op_block_func to disable cache
            // on the other CPU
        }
        // Disable scheduler on the current CPU
        vTaskSuspendAll();
        // Can now set the priority back to the normal one
        vTaskPrioritySet(NULL, old_prio);
        // This is guaranteed to run on CPU <cpuid> because the other CPU is now
        // occupied by highest priority task
        assert(xPortGetCoreID() == cpuid);
    }
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // This CPU executes this routine, with non-IRAM interrupts and the scheduler
    // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
    // with non-iram interrupts and the scheduler disabled. None of these CPUs will
    // touch external RAM or flash this way, so we can safely disable caches.
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: flash operation ends on the same CPU as it has started
    assert(cpuid == s_flash_op_cpu);
    // More sanity check: if scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif

    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);

    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_task that flash operation is complete
        s_flash_op_complete = true;
    }

    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();

    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }

    // Release API lock
    spi_flash_op_unlock();
}
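
/*
 * Illustrative usage sketch (not part of this driver): a raw flash operation is
 * bracketed by the disable/enable pair above. Between the two calls, only IRAM
 * code may run and only internal RAM may be touched, since the flash cache is off.
 * do_flash_erase below is a hypothetical caller, not an IDF function.
 *
 *   static IRAM_ATTR esp_err_t do_flash_erase(uint32_t sector)
 *   {
 *       spi_flash_disable_interrupts_caches_and_other_cpu();
 *       // ... issue SPI commands here; use only IRAM code and DRAM data ...
 *       spi_flash_enable_interrupts_caches_and_other_cpu();
 *       return ESP_OK;
 *   }
 */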

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;

    // do not care about other CPU, it was halted upon entering panic handler
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}

#else // CONFIG_FREERTOS_UNICORE

void spi_flash_init_lock(void)
{
}

void spi_flash_op_lock(void)
{
    vTaskSuspendAll();
}

void spi_flash_op_unlock(void)
{
    xTaskResumeAll();
}

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}

void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}

void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}

#endif // CONFIG_FREERTOS_UNICORE

/**
 * The following two functions are replacements for the Cache_Read_Disable and Cache_Read_Enable
 * functions in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
 * Cache_Flush before Cache_Read_Enable, even if the cached data was not modified.
 */
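/*
 * Layout of saved_state (derived from the code below): on the ESP32 it holds the
 * DPORT_(PRO|APP)_CACHE_CTRL1_REG bits selected by DPORT_CACHE_MASK(); on the ESP32-S2
 * it is the value returned by Cache_Suspend_ICache(); on the ESP32-S3 the ICache suspend
 * state is packed into the upper 16 bits and the DCache suspend state into the lower
 * 16 bits; on the ESP32-C3 only the upper 16 bits (ICache) are used.
 */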
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
    *saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
    uint32_t icache_state, dcache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    dcache_state = Cache_Suspend_DCache();
    *saved_state = icache_state | dcache_state;
#elif CONFIG_IDF_TARGET_ESP32C3
    uint32_t icache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    *saved_state = icache_state;
#endif
}

static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
    Cache_Resume_DCache(saved_state & 0xffff);
    Cache_Resume_ICache(saved_state >> 16);
#elif CONFIG_IDF_TARGET_ESP32C3
    Cache_Resume_ICache(saved_state >> 16);
#endif
}
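
/*
 * Returns true only if the flash cache is currently enabled on every CPU: on a
 * dual-core ESP32 both the PRO and APP cache enable bits are checked, on the other
 * targets the single ICache enable bit is checked.
 */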
IRAM_ATTR bool spi_flash_cache_enabled(void)
{
#if CONFIG_IDF_TARGET_ESP32
    bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
#if portNUM_PROCESSORS == 2
    result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
#elif CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
    bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
#endif
    return result;
}

#if CONFIG_IDF_TARGET_ESP32S2
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Suspend_ICache();
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    Cache_Resume_ICache(0);
}

IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
#else
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
    cache_size = CACHE_SIZE_16KB;
#endif
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
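
/*
 * Cache "wrap" (burst-with-wrap) lets a cache line refill start at the missed word
 * and wrap around within the line instead of always starting at the line boundary.
 * The helpers below set the corresponding wrap-around bits while the affected caches
 * are suspended, then resume them with their previous autoload setting.
 */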
static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}

#if CONFIG_ESP32S2_SPIRAM_SUPPORT
static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_SRAM_RD_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}
#endif
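
/*
 * Decides which wrap sizes to use for flash and for SPIRAM. Index 0 of the
 * flash_wrap_sizes/spiram_wrap_sizes arrays tracks ICache traffic (instructions,
 * plus rodata when drom0 is mapped through the ICache), index 1 tracks DCache
 * traffic. Wrap is only enabled when all users of the same memory agree on one
 * size and the flash/PSRAM hardware supports it.
 */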
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 1; // always 1 in esp32s2
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
    drom0_in_icache = 0;
#endif

    if (icache_wrap_enable) {
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#else
        icache_wrap_size = 32;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B || CONFIG_ESP32S3_DATA_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B
        dcache_wrap_size = 16;
#else
        dcache_wrap_size = 32;
#endif
    }

    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif

    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    ESP_EARLY_LOGI(TAG, "flash_count=%d, size=%d, spiram_count=%d, size=%d,together=%d", flash_count, flash_wrap_size, spiram_count, spiram_wrap_size, flash_spiram_wrap_together);

    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif

#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32S2

#if CONFIG_IDF_TARGET_ESP32S3
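/*
 * ESP32-S3 variants of the cache configuration and wrap helpers. Here CACHE_SIZE_HALF
 * corresponds to a 16 KB instruction cache / 32 KB data cache and CACHE_SIZE_FULL to
 * 32 KB / 64 KB, as reflected in the log messages below.
 */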
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_IBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 16 : 32, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    extern void Cache_Enable_ICache(uint32_t autoload);
    Cache_Enable_ICache(0);
}

IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_DATA_CACHE_32KB
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK1, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK0, CACHE_MEMORY_DBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    // ESP_EARLY_LOGI(TAG, "Data cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 32 : 64, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}

static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}

#if CONFIG_ESP32S3_SPIRAM_SUPPORT
static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
{
    uint32_t i_autoload, d_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    if (dcache) {
        d_autoload = Cache_Suspend_DCache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_SRAM_RD_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
    if (dcache) {
        Cache_Resume_DCache(d_autoload);
    }
}
#endif

esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = false, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 0; // always 0 in chip7.2.4

    if (icache_wrap_enable) {
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
        icache_wrap_size = 32;
#else
        icache_wrap_size = 64;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
        dcache_wrap_size = 32;
#else
        dcache_wrap_size = 64;
#endif
    }

    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled();
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled();
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif

    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }

    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif

#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32S3

#if CONFIG_IDF_TARGET_ESP32C3
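/*
 * The ESP32-C3 only has an instruction cache for external memory, so only the
 * flash wrap path for the ICache is handled below.
 */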
static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache)
{
    uint32_t i_autoload;
    if (icache) {
        i_autoload = Cache_Suspend_ICache();
    }
    REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
    if (icache) {
        Cache_Resume_ICache(i_autoload);
    }
}

esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable)
{
    int flash_wrap_size = 0;
    bool flash_support_wrap = false;

    if (icache_wrap_enable) {
        flash_wrap_size = 32;
    }
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif // CONFIG_FLASHMODE_QIO

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_size > 0));
    }
    return ESP_OK;
}
#endif // CONFIG_IDF_TARGET_ESP32C3
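
/*
 * Unconditionally re-enables the flash cache for the given CPU. Unlike
 * spi_flash_restore_cache(), it does not rely on a state saved by
 * spi_flash_disable_cache(): on the ESP32 a default cache value is rebuilt from
 * DPORT_CACHE_GET_VAL/DPORT_CACHE_GET_MASK, on the other targets a zero state is
 * passed (see the TODO below).
 */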
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
    cache_value &= DPORT_CACHE_GET_MASK(cpuid);
    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, cache_value);
#else
    spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}