/* sleep_cpu_asm.S — RISC-V CPU critical-register save/restore for light sleep (ESP-IDF) */
/*
 * SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include "soc/soc.h"
#include "riscv/rvsleep-frames.h"
#include "soc/soc_caps.h"
#include "sdkconfig.h"

#if !CONFIG_IDF_TARGET_ESP32C6 && !CONFIG_IDF_TARGET_ESP32H2
#include "soc/lp_aon_reg.h"
#include "soc/extmem_reg.h"
#endif
  14. .section .data1,"aw"
  15. .global rv_core_critical_regs_frame
  16. .type rv_core_critical_regs_frame,@object
  17. .align 4
  18. rv_core_critical_regs_frame:
  19. .word 0
  20. /*
  21. --------------------------------------------------------------------------------
  22. This assembly subroutine is used to save the critical registers of the CPU
  23. core to the internal RAM before sleep, and modify the PMU control flag to
  24. indicate that the system needs to sleep. When the subroutine returns, it
  25. will return the memory pointer that saves the context information of the CPU
  26. critical registers.
  27. --------------------------------------------------------------------------------
  28. */
  29. .section .iram1,"ax"
  30. .global rv_core_critical_regs_save
  31. .type rv_core_critical_regs_save,@function
  32. .align 4
  33. rv_core_critical_regs_save:
  34. /* arrived here in critical section. we need:
  35. save riscv core critical registers to RvCoreCriticalSleepFrame
  36. */
  37. csrw mscratch, t0 /* use mscratch as temp storage */
  38. la t0, rv_core_critical_regs_frame
  39. lw t0, 0(t0) /* t0 pointer to RvCoreCriticalSleepFrame object */
  40. sw ra, RV_SLP_CTX_RA(t0)
  41. sw sp, RV_SLP_CTX_SP(t0)
  42. sw gp, RV_SLP_CTX_GP(t0)
  43. sw tp, RV_SLP_CTX_TP(t0)
  44. sw t1, RV_SLP_CTX_T1(t0)
  45. sw t2, RV_SLP_CTX_T2(t0)
  46. sw s0, RV_SLP_CTX_S0(t0)
  47. sw s1, RV_SLP_CTX_S1(t0)
  48. /* a0 is caller saved, so it does not need to be saved, but it should be the
  49. pointer value of RvCoreCriticalSleepFrame for return.
  50. */
  51. mv a0, t0
  52. sw a0, RV_SLP_CTX_A0(t0)
  53. sw a1, RV_SLP_CTX_A1(t0)
  54. sw a2, RV_SLP_CTX_A2(t0)
  55. sw a3, RV_SLP_CTX_A3(t0)
  56. sw a4, RV_SLP_CTX_A4(t0)
  57. sw a5, RV_SLP_CTX_A5(t0)
  58. sw a6, RV_SLP_CTX_A6(t0)
  59. sw a7, RV_SLP_CTX_A7(t0)
  60. sw s2, RV_SLP_CTX_S2(t0)
  61. sw s3, RV_SLP_CTX_S3(t0)
  62. sw s4, RV_SLP_CTX_S4(t0)
  63. sw s5, RV_SLP_CTX_S5(t0)
  64. sw s6, RV_SLP_CTX_S6(t0)
  65. sw s7, RV_SLP_CTX_S7(t0)
  66. sw s8, RV_SLP_CTX_S8(t0)
  67. sw s9, RV_SLP_CTX_S9(t0)
  68. sw s10, RV_SLP_CTX_S10(t0)
  69. sw s11, RV_SLP_CTX_S11(t0)
  70. sw t3, RV_SLP_CTX_T3(t0)
  71. sw t4, RV_SLP_CTX_T4(t0)
  72. sw t5, RV_SLP_CTX_T5(t0)
  73. sw t6, RV_SLP_CTX_T6(t0)
  74. csrr t1, mstatus
  75. sw t1, RV_SLP_CTX_MSTATUS(t0)
  76. csrr t2, mtvec
  77. sw t2, RV_SLP_CTX_MTVEC(t0)
  78. csrr t3, mcause
  79. sw t3, RV_SLP_CTX_MCAUSE(t0)
  80. csrr t1, mtval
  81. sw t1, RV_SLP_CTX_MTVAL(t0)
  82. csrr t2, mie
  83. sw t2, RV_SLP_CTX_MIE(t0)
  84. csrr t3, mip
  85. sw t3, RV_SLP_CTX_MIP(t0)
  86. csrr t1, mepc
  87. sw t1, RV_SLP_CTX_MEPC(t0)
  88. /*
  89. !!! Let idf knows it's going to sleep !!!
  90. RV_SLP_STK_PMUFUNC field is used to identify whether it is going to sleep or
  91. has just been awakened. We use the lowest 2 bits as indication information,
  92. 3 means being awakened, 1 means going to sleep.
  93. */
  94. li t1, ~0x3
  95. lw t2, RV_SLP_CTX_PMUFUNC(t0)
  96. and t2, t1, t2
  97. ori t2, t2, 0x1
  98. sw t2, RV_SLP_CTX_PMUFUNC(t0)
  99. mv t3, t0
  100. csrr t0, mscratch
  101. sw t0, RV_SLP_CTX_T0(t3)
  102. #if !CONFIG_IDF_TARGET_ESP32C6 && !CONFIG_IDF_TARGET_ESP32H2
  103. /* writeback dcache is required here!!! */
  104. la t0, EXTMEM_CACHE_SYNC_MAP_REG
  105. li t1, 0x10
  106. sw t1, 0x0(t0) /* set EXTMEM_CACHE_SYNC_MAP_REG bit 4 */
  107. la t2, EXTMEM_CACHE_SYNC_ADDR_REG
  108. sw zero, 0x0(t2) /* clear EXTMEM_CACHE_SYNC_ADDR_REG */
  109. la t0, EXTMEM_CACHE_SYNC_SIZE_REG
  110. sw zero, 0x0(t0) /* clear EXTMEM_CACHE_SYNC_SIZE_REG */
  111. la t1, EXTMEM_CACHE_SYNC_CTRL_REG
  112. lw t2, 0x0(t1)
  113. ori t2, t2, 0x4
  114. sw t2, 0x0(t1)
  115. li t0, 0x10 /* SYNC_DONE bit */
  116. wait_sync_done:
  117. lw t2, 0x0(t1)
  118. and t2, t0, t2
  119. beqz t2, wait_sync_done
  120. #endif
  121. lw t0, RV_SLP_CTX_T0(t3)
  122. lw t1, RV_SLP_CTX_T1(t3)
  123. lw t2, RV_SLP_CTX_T2(t3)
  124. lw t3, RV_SLP_CTX_T3(t3)
  125. ret
  126. .size rv_core_critical_regs_save, . - rv_core_critical_regs_save
  127. #define CSR_PCER_U 0x800
  128. #define CSR_PCMR_U 0x801
  129. #define PCER_CYCLES (1<<0) /* count clock cycles */
  130. #define PCMR_GLOBAL_EN (1<<0) /* enable count */
  131. #define pcer CSR_PCER_U
  132. #define pcmr CSR_PCMR_U
  133. /*
  134. --------------------------------------------------------------------------------
  135. This assembly subroutine is used to restore the CPU core critical register
  136. context before sleep after system wakes up, modify the PMU control
  137. information, and return the critical register context memory object pointer.
  138. After the subroutine returns, continue to restore other modules of the
  139. system.
  140. --------------------------------------------------------------------------------
  141. */
  142. .section .iram1,"ax"
  143. .global rv_core_critical_regs_restore
  144. .weak rv_core_critical_regs_restore
  145. .type rv_core_critical_regs_restore,@function
  146. .global _rv_core_critical_regs_restore
  147. .type _rv_core_critical_regs_restore,@function
  148. .align 4
  149. _rv_core_critical_regs_restore: /* export a strong symbol to jump to here, used
  150. * for a static callback */
  151. nop
  152. rv_core_critical_regs_restore:
  153. la t0, rv_core_critical_regs_frame
  154. lw t0, 0(t0) /* t0 pointer to RvCoreCriticalSleepFrame object */
  155. beqz t0, .skip_restore /* make sure we do not jump to zero address */
  156. /*
  157. !!! Let idf knows it's sleep awake. !!!
  158. RV_SLP_STK_PMUFUNC field is used to identify whether it is going to sleep or
  159. has just been awakened. We use the lowest 2 bits as indication information,
  160. 3 means being awakened, 1 means going to sleep.
  161. */
  162. lw t1, RV_SLP_CTX_PMUFUNC(t0)
  163. ori t1, t1, 0x3
  164. sw t1, RV_SLP_CTX_PMUFUNC(t0)
  165. lw t2, RV_SLP_CTX_MEPC(t0)
  166. csrw mepc, t2
  167. lw t3, RV_SLP_CTX_MIP(t0)
  168. csrw mip, t3
  169. lw t1, RV_SLP_CTX_MIE(t0)
  170. csrw mie, t1
  171. lw t2, RV_SLP_CTX_MSTATUS(t0)
  172. csrw mstatus, t2
  173. lw t3, RV_SLP_CTX_MTVEC(t0)
  174. csrw mtvec, t3
  175. lw t1, RV_SLP_CTX_MCAUSE(t0)
  176. csrw mcause, t1
  177. lw t2, RV_SLP_CTX_MTVAL(t0)
  178. csrw mtval, t2
  179. lw t6, RV_SLP_CTX_T6(t0)
  180. lw t5, RV_SLP_CTX_T5(t0)
  181. lw t4, RV_SLP_CTX_T4(t0)
  182. lw t3, RV_SLP_CTX_T3(t0)
  183. lw s11, RV_SLP_CTX_S11(t0)
  184. lw s10, RV_SLP_CTX_S10(t0)
  185. lw s9, RV_SLP_CTX_S9(t0)
  186. lw s8, RV_SLP_CTX_S8(t0)
  187. lw s7, RV_SLP_CTX_S7(t0)
  188. lw s6, RV_SLP_CTX_S6(t0)
  189. lw s5, RV_SLP_CTX_S5(t0)
  190. lw s4, RV_SLP_CTX_S4(t0)
  191. lw s3, RV_SLP_CTX_S3(t0)
  192. lw s2, RV_SLP_CTX_S2(t0)
  193. lw a7, RV_SLP_CTX_A7(t0)
  194. lw a6, RV_SLP_CTX_A6(t0)
  195. lw a5, RV_SLP_CTX_A5(t0)
  196. lw a4, RV_SLP_CTX_A4(t0)
  197. lw a3, RV_SLP_CTX_A3(t0)
  198. lw a2, RV_SLP_CTX_A2(t0)
  199. lw a1, RV_SLP_CTX_A1(t0)
  200. lw a0, RV_SLP_CTX_A0(t0)
  201. lw s1, RV_SLP_CTX_S1(t0)
  202. lw s0, RV_SLP_CTX_S0(t0)
  203. lw t2, RV_SLP_CTX_T2(t0)
  204. lw t1, RV_SLP_CTX_T1(t0)
  205. lw tp, RV_SLP_CTX_TP(t0)
  206. lw gp, RV_SLP_CTX_GP(t0)
  207. lw sp, RV_SLP_CTX_SP(t0)
  208. lw ra, RV_SLP_CTX_RA(t0)
  209. lw t0, RV_SLP_CTX_T0(t0)
  210. .skip_restore:
  211. ret
  212. .size rv_core_critical_regs_restore, . - rv_core_critical_regs_restore