// cache_utils.c
  1. // Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. #include <stdlib.h>
  15. #include <assert.h>
  16. #include <string.h>
  17. #include <stdio.h>
  18. #include <freertos/FreeRTOS.h>
  19. #include <freertos/task.h>
  20. #include <freertos/semphr.h>
  21. #if CONFIG_IDF_TARGET_ESP32
  22. #include <esp32/rom/spi_flash.h>
  23. #include <esp32/rom/cache.h>
  24. #elif CONFIG_IDF_TARGET_ESP32S2BETA
  25. #include "esp32s2beta/rom/spi_flash.h"
  26. #include "esp32s2beta/rom/cache.h"
  27. #endif
  28. #include <soc/soc.h>
  29. #include <soc/dport_reg.h>
  30. #include "sdkconfig.h"
  31. #include "esp_ipc.h"
  32. #include "esp_attr.h"
  33. #include "esp_intr_alloc.h"
  34. #include "esp_spi_flash.h"
  35. #include "esp_log.h"
  36. static __attribute__((unused)) const char* TAG = "spiflash";
  37. static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t* saved_state);
  38. static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);
  39. static uint32_t s_flash_op_cache_state[2];
  40. #ifndef CONFIG_FREERTOS_UNICORE
  41. static SemaphoreHandle_t s_flash_op_mutex;
  42. static volatile bool s_flash_op_can_start = false;
  43. static volatile bool s_flash_op_complete = false;
  44. #ifndef NDEBUG
  45. static volatile int s_flash_op_cpu = -1;
  46. #endif
  47. void spi_flash_init_lock(void)
  48. {
  49. s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
  50. assert(s_flash_op_mutex != NULL);
  51. }
// Acquire the flash-operation lock. The mutex is recursive, so the same
// task may nest lock/unlock pairs. Blocks indefinitely (portMAX_DELAY).
void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}
// Release one level of the recursive flash-operation lock taken by
// spi_flash_op_lock().
void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}
  60. /*
  61. If you're going to modify this, keep in mind that while the flash caches of the pro and app
  62. cpu are separate, the psram cache is *not*. If one of the CPUs returns from a flash routine
  63. with its cache enabled but the other CPUs cache is not enabled yet, you will have problems
  64. when accessing psram from the former CPU.
  65. */
// IPC target executed on the CPU that is NOT performing the flash operation.
// It parks that CPU in an IRAM busy loop until the operating CPU signals
// completion via s_flash_op_complete. 'arg' carries this CPU's id (cast to
// uint32_t by the caller).
void IRAM_ATTR spi_flash_op_block_func(void* arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before IPC task has a chance to check it
    // (if it is preempted by an ISR taking non-trivial amount of time)
    s_flash_op_complete = false;
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}
// Prepare for a flash operation: take the API lock, stall the other CPU
// (via IPC into spi_flash_op_block_func), disable non-IRAM interrupts and
// the scheduler on this CPU, then disable flash caches on both CPUs.
// Must be paired with spi_flash_enable_interrupts_caches_and_other_cpu().
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // For sanity check later: record the CPU which has started doing flash operation
    assert(s_flash_op_cpu == -1);
    s_flash_op_cpu = cpuid;
#endif
    if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
        // Scheduler hasn't been started yet, it means that spi_flash API is being
        // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from
        // PRO CPU. APP CPU is either in reset or spinning inside user_start_cpu1,
        // which is in IRAM. So it is safe to disable cache for the other_cpuid here.
        assert(other_cpuid == 1);
        spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    } else {
        // Temporarily raise current task priority to prevent a deadlock while
        // waiting for IPC task to start on the other CPU
        int old_prio = uxTaskPriorityGet(NULL);
        vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
        // Signal to the spi_flash_op_block_func on the other CPU that we need it to
        // disable cache there and block other tasks from executing.
        s_flash_op_can_start = false;
        esp_err_t ret = esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void*) other_cpuid);
        assert(ret == ESP_OK);
        while (!s_flash_op_can_start) {
            // Busy loop and wait for spi_flash_op_block_func to disable cache
            // on the other CPU
        }
        // Disable scheduler on the current CPU
        vTaskSuspendAll();
        // Can now set the priority back to the normal one
        vTaskPrioritySet(NULL, old_prio);
        // This is guaranteed to run on CPU <cpuid> because the other CPU is now
        // occupied by highest priority task
        assert(xPortGetCoreID() == cpuid);
    }
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // This CPU executes this routine, with non-IRAM interrupts and the scheduler
    // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
    // with non-iram interrupts and the scheduler disabled. None of these CPUs will
    // touch external RAM or flash this way, so we can safely disable caches.
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
}
// Undo spi_flash_disable_interrupts_caches_and_other_cpu(): re-enable caches
// on both CPUs, release the other CPU from its busy loop, restore non-IRAM
// interrupts and the scheduler, and drop the API lock. Must run on the same
// CPU that performed the corresponding disable call.
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: flash operation ends on the same CPU as it has started
    assert(cpuid == s_flash_op_cpu);
    // More sanity check: if scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif
    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_func that flash operation is complete
        s_flash_op_complete = true;
    }
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }
    // Release API lock
    spi_flash_op_unlock();
}
// Variant used when no OS coordination is possible (e.g. from the panic
// handler): disables caches on both CPUs and non-IRAM interrupts, without
// taking locks or stalling the other CPU through IPC.
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
    // do not care about other CPU, it was halted upon entering panic handler
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
}
// Counterpart of the no-OS disable: restores cache and non-IRAM interrupts
// on the current CPU only (the other CPU stays halted).
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
  187. #else // CONFIG_FREERTOS_UNICORE
// Single-core build: locking is done by suspending the scheduler
// (see spi_flash_op_lock below), so there is nothing to initialize.
void spi_flash_init_lock(void)
{
}
// Single-core build: serialize flash operations by suspending the scheduler.
void spi_flash_op_lock(void)
{
    vTaskSuspendAll();
}
// Single-core build: resume the scheduler suspended by spi_flash_op_lock().
void spi_flash_op_unlock(void)
{
    xTaskResumeAll();
}
// Single-core build: take the lock (suspend scheduler), disable non-IRAM
// interrupts, then disable the cache of the only CPU (id 0).
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
// Single-core build: restore cache, re-enable non-IRAM interrupts, then
// release the lock (resume scheduler) — exact reverse of the disable call.
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}
// Single-core, no-OS variant (e.g. panic handler): disable interrupts and
// cache without touching the scheduler.
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
// Single-core, no-OS variant: restore cache and non-IRAM interrupts.
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
  225. #endif // CONFIG_FREERTOS_UNICORE
  226. /**
  227. * The following two functions are replacements for Cache_Read_Disable and Cache_Read_Enable
  228. * function in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
  229. * Cache_Flush before Cache_Read_Enable, even if cached data was not modified.
  230. */
  231. #if CONFIG_IDF_TARGET_ESP32
  232. static const uint32_t cache_mask = DPORT_APP_CACHE_MASK_OPSDRAM | DPORT_APP_CACHE_MASK_DROM0 |
  233. DPORT_APP_CACHE_MASK_DRAM1 | DPORT_APP_CACHE_MASK_IROM0 |
  234. DPORT_APP_CACHE_MASK_IRAM1 | DPORT_APP_CACHE_MASK_IRAM0;
  235. #elif CONFIG_IDF_TARGET_ESP32S2BETA
  236. // static const uint32_t icache_mask = DPORT_PRO_ICACHE_MASK_DROM0 |DPORT_PRO_ICACHE_MASK_IROM0 |
  237. // DPORT_PRO_ICACHE_MASK_IRAM1 | DPORT_PRO_ICACHE_MASK_IRAM0;
  238. // static const uint32_t dcache_mask = DPORT_PRO_DCACHE_MASK_DRAM1 | DPORT_PRO_DCACHE_MASK_DRAM0 |
  239. // DPORT_PRO_DCACHE_MASK_DPORT | DPORT_PRO_DCACHE_MASK_DROM0;
  240. #endif
// Disable the flash cache of CPU 'cpuid', saving enough state into
// *saved_state for spi_flash_restore_cache() to bring it back.
// ESP32: reads the cache-mask bits, spins until the cache state register
// reports idle (== 1), then clears the cache-enable bit.
// ESP32-S2 beta: suspends ICache (and DCache, when DROM0 is not routed
// through ICache). NOTE(review): the DCache state is stored at
// saved_state + 1 — callers must pass a pointer into the two-element
// s_flash_op_cache_state array so this write stays in bounds.
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t* saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    if (cpuid == 0) {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        // Wait for the cache state machine to go idle before disabling
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2BETA
    *saved_state = Cache_Suspend_ICache();
    if (!Cache_Drom0_Using_ICache()) {
        *(saved_state + 1) = Cache_Suspend_DCache();
    }
#endif
}
// Re-enable the flash cache of CPU 'cpuid' using the state captured by
// spi_flash_disable_cache(): set the cache-enable bit, then restore the
// saved cache-mask bits.
// NOTE(review): on ESP32-S2 beta the DCache state is read directly from
// s_flash_op_cache_state[1] rather than from a second parameter — this is
// asymmetric with spi_flash_disable_cache()'s saved_state + 1 write and
// only matches when callers pass &s_flash_op_cache_state[0]; verify.
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    if (cpuid == 0) {
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2BETA
    Cache_Resume_ICache(saved_state);
    if (!Cache_Drom0_Using_ICache()) {
        Cache_Resume_DCache(s_flash_op_cache_state[1]);
    }
#endif
}
  289. IRAM_ATTR bool spi_flash_cache_enabled(void)
  290. {
  291. #if CONFIG_IDF_TARGET_ESP32
  292. bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
  293. #elif CONFIG_IDF_TARGET_ESP32S2BETA
  294. bool result = (DPORT_REG_GET_BIT(DPORT_PRO_ICACHE_CTRL_REG, DPORT_PRO_ICACHE_ENABLE) != 0);
  295. if (!Cache_Drom0_Using_ICache()) {
  296. result = result && (DPORT_REG_GET_BIT(DPORT_PRO_DCACHE_CTRL_REG, DPORT_PRO_DCACHE_ENABLE) != 0);
  297. }
  298. #endif
  299. #if portNUM_PROCESSORS == 2
  300. result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
  301. #endif
  302. return result;
  303. }
  304. #if CONFIG_IDF_TARGET_ESP32S2BETA
  305. IRAM_ATTR void esp_config_instruction_cache_mode(void)
  306. {
  307. cache_size_t cache_size;
  308. cache_ways_t cache_ways;
  309. cache_line_size_t cache_line_size;
  310. #if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
  311. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
  312. cache_size = CACHE_SIZE_8KB;
  313. #else
  314. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
  315. cache_size = CACHE_SIZE_16KB;
  316. #endif
  317. #if CONFIG_INSTRUCTION_CACHE_4WAYS
  318. cache_ways = CACHE_4WAYS_ASSOC;
  319. #else
  320. cache_ways = CACHE_8WAYS_ASSOC;
  321. #endif
  322. #if CONFIG_INSTRUCTION_CACHE_LINE_16B
  323. cache_line_size = CACHE_LINE_SIZE_16B;
  324. #elif CONFIG_INSTRUCTION_CACHE_LINE_32B
  325. cache_line_size = CACHE_LINE_SIZE_32B;
  326. #else
  327. cache_line_size = CACHE_LINE_SIZE_64B;
  328. #endif
  329. ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16,cache_ways == CACHE_4WAYS_ASSOC ? 4: 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 2 : 64));
  330. Cache_Suspend_ICache();
  331. Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
  332. Cache_Invalidate_ICache_All();
  333. Cache_Resume_ICache(0);
  334. }
  335. IRAM_ATTR void esp_config_data_cache_mode(void)
  336. {
  337. cache_size_t cache_size;
  338. cache_ways_t cache_ways;
  339. cache_line_size_t cache_line_size;
  340. #if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
  341. #if CONFIG_ESP32S2_DATA_CACHE_8KB
  342. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
  343. cache_size = CACHE_SIZE_8KB;
  344. #else
  345. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
  346. cache_size = CACHE_SIZE_16KB;
  347. #endif
  348. #else
  349. #if CONFIG_ESP32S2_DATA_CACHE_8KB
  350. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
  351. cache_size = CACHE_SIZE_8KB;
  352. #else
  353. Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
  354. cache_size = CACHE_SIZE_16KB;
  355. #endif
  356. #endif
  357. #if CONFIG_ESP32S2_DATA_CACHE_4WAYS
  358. cache_ways = CACHE_4WAYS_ASSOC;
  359. #else
  360. cache_ways = CACHE_8WAYS_ASSOC;
  361. #endif
  362. #if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
  363. cache_line_size = CACHE_LINE_SIZE_16B;
  364. #elif CONFIG_ESP32S2_DATA_CACHE_LINE_32B
  365. cache_line_size = CACHE_LINE_SIZE_32B;
  366. #else
  367. cache_line_size = CACHE_LINE_SIZE_64B;
  368. #endif
  369. ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, cache_ways == CACHE_4WAYS_ASSOC ? 4: 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 2 : 64));
  370. Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
  371. Cache_Invalidate_DCache_All();
  372. }
// Reroute rodata (DROM0) accesses from the instruction cache to the data
// cache: clear the DROM0 mask bit on DCache, switch the DROM0 source,
// unmap the DROM region from the ICache MMU, then mask DROM0 on ICache.
void esp_switch_rodata_to_dcache(void)
{
    REG_CLR_BIT(DPORT_PRO_DCACHE_CTRL1_REG, DPORT_PRO_DCACHE_MASK_DROM0);
    Cache_Drom0_Source_DCache();
    MMU_Drom_ICache_Unmap();
    REG_SET_BIT(DPORT_PRO_ICACHE_CTRL1_REG, DPORT_PRO_ICACHE_MASK_DROM0);
    ESP_EARLY_LOGI(TAG, "Switch rodata load path to data cache.");
}
  381. static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
  382. {
  383. uint32_t i_autoload, d_autoload;
  384. if (icache) {
  385. i_autoload = Cache_Suspend_ICache();
  386. }
  387. if (dcache) {
  388. d_autoload = Cache_Suspend_DCache();
  389. }
  390. REG_SET_BIT(DPORT_PRO_CACHE_WRAP_AROUND_CTRL_REG, DPORT_PRO_CACHE_FLASH_WRAP_AROUND);
  391. if (icache) {
  392. Cache_Resume_ICache(i_autoload);
  393. }
  394. if (dcache) {
  395. Cache_Resume_DCache(d_autoload);
  396. }
  397. }
  398. #if CONFIG_SPIRAM_SUPPORT
  399. static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
  400. {
  401. uint32_t i_autoload, d_autoload;
  402. if (icache) {
  403. i_autoload = Cache_Suspend_ICache();
  404. }
  405. if (dcache) {
  406. d_autoload = Cache_Suspend_DCache();
  407. }
  408. REG_SET_BIT(DPORT_PRO_CACHE_WRAP_AROUND_CTRL_REG, DPORT_PRO_CACHE_SRAM_RD_WRAP_AROUND);
  409. if (icache) {
  410. Cache_Resume_ICache(i_autoload);
  411. }
  412. if (dcache) {
  413. Cache_Resume_DCache(d_autoload);
  414. }
  415. }
  416. #endif
// Enable cache wrap-around mode for flash and/or SPIRAM, based on which
// cache (ICache index 0, DCache index 1) fronts each storage and the
// configured cache line sizes. Wrap is only enabled when all caches that
// share a storage agree on the wrap (line) size, and the storage device
// itself supports that size. Returns ESP_OK on success/partial success,
// ESP_FAIL when a size conflict forces aborting wrap.
// NOTE(review): the line-size Kconfig names here (CONFIG_DATA_CACHE_LINE_*,
// CONFIG_INSTRUCTION_CACHE_LINE_*) differ from the CONFIG_ESP32S2_*-prefixed
// names used in esp_config_data_cache_mode() — confirm both sets exist in
// Kconfig, otherwise the #else (64-byte) branch is always taken here.
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    // Index 0 = ICache, index 1 = DCache; -1 means "this cache does not
    // front this storage".
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    if (icache_wrap_enable) {
#if CONFIG_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#elif CONFIG_INSTRUCTION_CACHE_LINE_32B
        icache_wrap_size = 32;
#else
        icache_wrap_size = 64;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#elif CONFIG_DATA_CACHE_LINE_32B
        dcache_wrap_size = 32;
#else
        dcache_wrap_size = 64;
#endif
    }
    // Determine which storage (flash or SPIRAM) backs instructions/rodata.
    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_INSTRUCTION_USE_SPIRAM
    extern uint32_t esp_spiram_instruction_access_enabled();
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_RODATA_USE_SPIRAM
    extern uint32_t esp_spiram_rodata_access_enabled();
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif
    // Instructions always go through ICache (index 0).
    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    // Rodata goes through ICache or DCache depending on the DROM0 routing.
    if (rodata_use_spiram) {
        if (Cache_Drom0_Using_ICache()) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (Cache_Drom0_Using_ICache()) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_SPIRAM_SUPPORT
    // SPIRAM data always goes through DCache.
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Count how many caches front each storage and record the last size seen.
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two cache/storage pairings means at least one cache serves
    // both flash and SPIRAM, so their wrap settings must match.
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }
    // Check device-side support for the chosen wrap sizes.
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#ifdef CONFIG_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif
    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }
    // Enable wrap on the device(s) and in the corresponding cache(s).
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled.");
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled.");
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif
    return ESP_OK;
}
  553. #endif