/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-02-02     lizhirui     first version
 * 2021-02-11     lizhirui     fixed gp save/store bug
 * 2021-11-18     JasonHu      add fpu registers save/restore
 */
#ifndef __STACKFRAME_H__
#define __STACKFRAME_H__

#include "cpuport.h"
#include "riscv_csr.h"

#ifdef ARCH_RISCV_FPU
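/*
 * Each f-register slot is FREGBYTES wide: (__riscv_flen >> 5) * 4 equals
 * __riscv_flen / 8, so FPU_CTX_Fn_OFF below works out to n * FREGBYTES.
 */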
#define FPU_CTX_F0_OFF  (__riscv_flen >> 5) * 0   /* offsetof(fpu_context_t, fpustatus.f[0])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F1_OFF  (__riscv_flen >> 5) * 4   /* offsetof(fpu_context_t, fpustatus.f[1])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F2_OFF  (__riscv_flen >> 5) * 8   /* offsetof(fpu_context_t, fpustatus.f[2])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F3_OFF  (__riscv_flen >> 5) * 12  /* offsetof(fpu_context_t, fpustatus.f[3])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F4_OFF  (__riscv_flen >> 5) * 16  /* offsetof(fpu_context_t, fpustatus.f[4])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F5_OFF  (__riscv_flen >> 5) * 20  /* offsetof(fpu_context_t, fpustatus.f[5])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F6_OFF  (__riscv_flen >> 5) * 24  /* offsetof(fpu_context_t, fpustatus.f[6])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F7_OFF  (__riscv_flen >> 5) * 28  /* offsetof(fpu_context_t, fpustatus.f[7])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F8_OFF  (__riscv_flen >> 5) * 32  /* offsetof(fpu_context_t, fpustatus.f[8])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F9_OFF  (__riscv_flen >> 5) * 36  /* offsetof(fpu_context_t, fpustatus.f[9])  - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F10_OFF (__riscv_flen >> 5) * 40  /* offsetof(fpu_context_t, fpustatus.f[10]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F11_OFF (__riscv_flen >> 5) * 44  /* offsetof(fpu_context_t, fpustatus.f[11]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F12_OFF (__riscv_flen >> 5) * 48  /* offsetof(fpu_context_t, fpustatus.f[12]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F13_OFF (__riscv_flen >> 5) * 52  /* offsetof(fpu_context_t, fpustatus.f[13]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F14_OFF (__riscv_flen >> 5) * 56  /* offsetof(fpu_context_t, fpustatus.f[14]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F15_OFF (__riscv_flen >> 5) * 60  /* offsetof(fpu_context_t, fpustatus.f[15]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F16_OFF (__riscv_flen >> 5) * 64  /* offsetof(fpu_context_t, fpustatus.f[16]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F17_OFF (__riscv_flen >> 5) * 68  /* offsetof(fpu_context_t, fpustatus.f[17]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F18_OFF (__riscv_flen >> 5) * 72  /* offsetof(fpu_context_t, fpustatus.f[18]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F19_OFF (__riscv_flen >> 5) * 76  /* offsetof(fpu_context_t, fpustatus.f[19]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F20_OFF (__riscv_flen >> 5) * 80  /* offsetof(fpu_context_t, fpustatus.f[20]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F21_OFF (__riscv_flen >> 5) * 84  /* offsetof(fpu_context_t, fpustatus.f[21]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F22_OFF (__riscv_flen >> 5) * 88  /* offsetof(fpu_context_t, fpustatus.f[22]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F23_OFF (__riscv_flen >> 5) * 92  /* offsetof(fpu_context_t, fpustatus.f[23]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F24_OFF (__riscv_flen >> 5) * 96  /* offsetof(fpu_context_t, fpustatus.f[24]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F25_OFF (__riscv_flen >> 5) * 100 /* offsetof(fpu_context_t, fpustatus.f[25]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F26_OFF (__riscv_flen >> 5) * 104 /* offsetof(fpu_context_t, fpustatus.f[26]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F27_OFF (__riscv_flen >> 5) * 108 /* offsetof(fpu_context_t, fpustatus.f[27]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F28_OFF (__riscv_flen >> 5) * 112 /* offsetof(fpu_context_t, fpustatus.f[28]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F29_OFF (__riscv_flen >> 5) * 116 /* offsetof(fpu_context_t, fpustatus.f[29]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F30_OFF (__riscv_flen >> 5) * 120 /* offsetof(fpu_context_t, fpustatus.f[30]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#define FPU_CTX_F31_OFF (__riscv_flen >> 5) * 124 /* offsetof(fpu_context_t, fpustatus.f[31]) - offsetof(fpu_context_t, fpustatus.f[0]) */
#endif /* ARCH_RISCV_FPU */
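
/*
 * SAVE_ALL pushes the full trap frame onto the current stack: the general
 * registers plus mepc/mstatus first, then (when configured) the DSP, FPU,
 * vector, and matrix state. With the CONFIG_CHECK_*_DIRTY options, an
 * extension's state is only written out when its dirty field in mstatus
 * shows it was touched; the frame space is still reserved either way, so
 * the layout stays fixed.
 */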
.macro SAVE_ALL
    /* save general registers */
    /* x2 (sp) must be written first, before sp itself is moved */
    STORE x2, (-CTX_GENERAL_REG_NR * REGBYTES + 1 * REGBYTES)(sp)
    addi sp, sp, -CTX_GENERAL_REG_NR * REGBYTES
    STORE x1, 0 * REGBYTES(sp)
    STORE x3, 2 * REGBYTES(sp)
    STORE x4, 3 * REGBYTES(sp)
    STORE x5, 4 * REGBYTES(sp)
    STORE x6, 5 * REGBYTES(sp)
    STORE x7, 6 * REGBYTES(sp)
    STORE x8, 7 * REGBYTES(sp)
    STORE x9, 8 * REGBYTES(sp)
    STORE x10, 9 * REGBYTES(sp)
    STORE x11, 10 * REGBYTES(sp)
    STORE x12, 11 * REGBYTES(sp)
    STORE x13, 12 * REGBYTES(sp)
    STORE x14, 13 * REGBYTES(sp)
    STORE x15, 14 * REGBYTES(sp)
#ifndef __riscv_32e
    STORE x16, 15 * REGBYTES(sp)
    STORE x17, 16 * REGBYTES(sp)
    STORE x18, 17 * REGBYTES(sp)
    STORE x19, 18 * REGBYTES(sp)
    STORE x20, 19 * REGBYTES(sp)
    STORE x21, 20 * REGBYTES(sp)
    STORE x22, 21 * REGBYTES(sp)
    STORE x23, 22 * REGBYTES(sp)
    STORE x24, 23 * REGBYTES(sp)
    STORE x25, 24 * REGBYTES(sp)
    STORE x26, 25 * REGBYTES(sp)
    STORE x27, 26 * REGBYTES(sp)
    STORE x28, 27 * REGBYTES(sp)
    STORE x29, 28 * REGBYTES(sp)
    STORE x30, 29 * REGBYTES(sp)
    STORE x31, 30 * REGBYTES(sp)
    csrr x1, mepc
    STORE x1, 31 * REGBYTES(sp)
    /* keep mstatus in t3: the lazy save paths below test its FS/VS/MS
       dirty fields before t3 is clobbered */
    csrr t3, mstatus
    STORE t3, 32 * REGBYTES(sp)
#else
    csrr x1, mepc
    STORE x1, 15 * REGBYTES(sp)
    csrr x1, mstatus
    STORE x1, 16 * REGBYTES(sp)
#endif
#if defined(__riscv_dsp) && defined(ARCH_RISCV_DSP)
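    /* the DSP context saved here is the vxsat saturation flag CSR */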
    addi sp, sp, -CTX_DSP_CSR_REG_NR * REGBYTES
    csrr t0, vxsat
    STORE t0, 0(sp)
#endif
#if defined(__riscv_flen) && defined(ARCH_RISCV_FPU)
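    /* lazy save: with dirty checking enabled, the FPU state is written
       out only when mstatus.FS was marked dirty since the last switch */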
#if CONFIG_CHECK_FPU_DIRTY
    li t1, SR_FS_DIRTY
    and t4, t3, t1
    bne t4, t1, 1f
#endif /* CONFIG_CHECK_FPU_DIRTY */
    /* save fcsr register */
    addi sp, sp, -(CTX_FPU_CSR_REG_NR) * REGBYTES
    frcsr t0
    STORE t0, (0)(sp)
    /* save fpu registers */
    addi sp, sp, -(CTX_FPU_REG_NR) * FREGBYTES
    FSTORE f0, FPU_CTX_F0_OFF(sp)
    FSTORE f1, FPU_CTX_F1_OFF(sp)
    FSTORE f2, FPU_CTX_F2_OFF(sp)
    FSTORE f3, FPU_CTX_F3_OFF(sp)
    FSTORE f4, FPU_CTX_F4_OFF(sp)
    FSTORE f5, FPU_CTX_F5_OFF(sp)
    FSTORE f6, FPU_CTX_F6_OFF(sp)
    FSTORE f7, FPU_CTX_F7_OFF(sp)
    FSTORE f8, FPU_CTX_F8_OFF(sp)
    FSTORE f9, FPU_CTX_F9_OFF(sp)
    FSTORE f10, FPU_CTX_F10_OFF(sp)
    FSTORE f11, FPU_CTX_F11_OFF(sp)
    FSTORE f12, FPU_CTX_F12_OFF(sp)
    FSTORE f13, FPU_CTX_F13_OFF(sp)
    FSTORE f14, FPU_CTX_F14_OFF(sp)
    FSTORE f15, FPU_CTX_F15_OFF(sp)
    FSTORE f16, FPU_CTX_F16_OFF(sp)
    FSTORE f17, FPU_CTX_F17_OFF(sp)
    FSTORE f18, FPU_CTX_F18_OFF(sp)
    FSTORE f19, FPU_CTX_F19_OFF(sp)
    FSTORE f20, FPU_CTX_F20_OFF(sp)
    FSTORE f21, FPU_CTX_F21_OFF(sp)
    FSTORE f22, FPU_CTX_F22_OFF(sp)
    FSTORE f23, FPU_CTX_F23_OFF(sp)
    FSTORE f24, FPU_CTX_F24_OFF(sp)
    FSTORE f25, FPU_CTX_F25_OFF(sp)
    FSTORE f26, FPU_CTX_F26_OFF(sp)
    FSTORE f27, FPU_CTX_F27_OFF(sp)
    FSTORE f28, FPU_CTX_F28_OFF(sp)
    FSTORE f29, FPU_CTX_F29_OFF(sp)
    FSTORE f30, FPU_CTX_F30_OFF(sp)
    FSTORE f31, FPU_CTX_F31_OFF(sp)
#if CONFIG_CHECK_FPU_DIRTY
    j 2f
1:
    /* don't store, move sp only */
    addi sp, sp, -(CTX_FPU_CSR_REG_NR * REGBYTES + CTX_FPU_REG_NR * FREGBYTES)
2:
#endif /* CONFIG_CHECK_FPU_DIRTY */
#endif /* __riscv_flen && ARCH_RISCV_FPU */
#if defined(__riscv_vector) && defined(ARCH_RISCV_VECTOR)
#if CONFIG_CHECK_VECTOR_DIRTY
    /* check whether the VS field of mstatus is 'dirty' */
    li t1, SR_VS_DIRTY
    and t4, t3, t1
    bne t4, t1, 3f
#endif
    /* save vector csr registers */
    addi sp, sp, -(CTX_VECTOR_CSR_REG_NR) * REGBYTES
    csrr t0, vl
    STORE t0, (0)(sp)
    csrr t0, vtype
    STORE t0, (1 * REGBYTES)(sp)
    csrr t0, vstart
    STORE t0, (2 * REGBYTES)(sp)
    csrr t0, vxsat
    STORE t0, (3 * REGBYTES)(sp)
    csrr t0, vxrm
    STORE t0, (4 * REGBYTES)(sp)
    /* save vector registers */
    csrr t0, vlenb
    li t1, CTX_VECTOR_REG_NR
    mul t2, t0, t1
    sub sp, sp, t2
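    /* each store below covers a group of eight vector registers (LMUL=8),
       so the stride between groups is vlenb * 8 bytes */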
    slli t0, t0, 3
    mv t1, sp
#if (__riscv_v == 7000)
    vsetvli zero, zero, e8, m8
    vsb.v v0, (t1)
    add t1, t1, t0
    vsb.v v8, (t1)
    add t1, t1, t0
    vsb.v v16, (t1)
    add t1, t1, t0
    vsb.v v24, (t1)
#elif (__riscv_v == 1000000)
    vsetvli zero, zero, e8, m8, ta, ma
    vs8r.v v0, (t1)
    add t1, t1, t0
    vs8r.v v8, (t1)
    add t1, t1, t0
    vs8r.v v16, (t1)
    add t1, t1, t0
    vs8r.v v24, (t1)
#endif
#if CONFIG_CHECK_VECTOR_DIRTY
    j 4f
3:
    /* don't save, move sp only */
    addi sp, sp, -(CTX_VECTOR_CSR_REG_NR) * REGBYTES
    csrr t0, vlenb
    li t1, CTX_VECTOR_REG_NR
    mul t2, t0, t1
    sub sp, sp, t2
4:
#endif /* CONFIG_CHECK_VECTOR_DIRTY */
#endif /* __riscv_vector && ARCH_RISCV_VECTOR */
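/* matrix context (__riscv_xtheadmatrix): three CSRs plus a register file
   saved as one whole-matrix store of 8 * xmlenb bytes */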
#if (defined(__riscv_matrix) || defined(__riscv_xtheadmatrix)) && defined(ARCH_RISCV_MATRIX)
#if CONFIG_CHECK_MATRIX_DIRTY
    /* if not dirty, skip */
    li t1, SR_MS_DIRTY
    and t4, t3, t1
    bne t4, t1, 5f
#endif
    /* save matrix csr registers */
    addi sp, sp, -(CTX_MATRIX_CSR_REG_NR) * REGBYTES
    csrr t0, xmrstart
    STORE t0, (0)(sp)
    csrr t0, xmcsr
    STORE t0, (1 * REGBYTES)(sp)
    csrr t0, xmsize
    STORE t0, (2 * REGBYTES)(sp)
    /* save matrix registers */
    csrr t0, xmlenb
    slli t1, t0, 3
    sub sp, sp, t1
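    /* clear xmrstart so the whole-matrix store below starts from row 0
       (xmrstart is assumed to act like vstart does for the vector unit) */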
    csrw xmrstart, x0
    mst8mb m0, (sp)
#if CONFIG_CHECK_MATRIX_DIRTY
    j 6f
5:
    /* don't save, move sp only */
    addi sp, sp, -(CTX_MATRIX_CSR_REG_NR) * REGBYTES
    csrr t0, xmlenb
    slli t1, t0, 3
    sub sp, sp, t1
6:
#endif /* CONFIG_CHECK_MATRIX_DIRTY */
#endif /* __riscv_matrix || __riscv_xtheadmatrix */
.endm

#if CONFIG_CHECK_FPU_DIRTY || CONFIG_CHECK_VECTOR_DIRTY || CONFIG_CHECK_MATRIX_DIRTY
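/*
 * RESTORE_MSTATUS reloads the saved mstatus into t3 before the lazy
 * restore paths run, so they can test the FS/VS/MS dirty fields that
 * were captured at save time. It computes the distance from the current
 * sp to the saved mstatus slot (skipping the matrix, vector, FPU and DSP
 * save areas) and reads it without popping anything.
 */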
.macro RESTORE_MSTATUS
    li t1, 0
#if (defined(__riscv_matrix) || defined(__riscv_xtheadmatrix)) && defined(ARCH_RISCV_MATRIX) /* matrix registers */
    addi t1, t1, (CTX_MATRIX_CSR_REG_NR) * REGBYTES
    csrr t0, xmlenb
    slli t0, t0, 3
    add t1, t1, t0
#endif /* __riscv_matrix || __riscv_xtheadmatrix */
#if defined(__riscv_vector) && defined(ARCH_RISCV_VECTOR)
    addi t1, t1, (CTX_VECTOR_CSR_REG_NR) * REGBYTES
    csrr t0, vlenb
    li t2, CTX_VECTOR_REG_NR
    mul t2, t0, t2
    add t1, t1, t2
#endif
#if defined(__riscv_flen) && defined(ARCH_RISCV_FPU)
    addi t1, t1, (CTX_FPU_CSR_REG_NR * REGBYTES + CTX_FPU_REG_NR * FREGBYTES)
#endif
#if defined(__riscv_dsp) && defined(ARCH_RISCV_DSP)
    /* the DSP csr area sits between the general frame and the FPU area
       and must be skipped as well */
    addi t1, t1, CTX_DSP_CSR_REG_NR * REGBYTES
#endif
    /* general regs: mstatus is the last slot of the general frame */
    addi t1, t1, (CTX_GENERAL_REG_NR - 1) * REGBYTES
    /* restore mstatus */
    add sp, sp, t1
    LOAD t3, (0)(sp)
    csrw mstatus, t3
    sub sp, sp, t1
.endm
#endif /* CONFIG_CHECK_FPU_DIRTY || CONFIG_CHECK_VECTOR_DIRTY || CONFIG_CHECK_MATRIX_DIRTY */

.macro RESTORE_ALL
#if CONFIG_CHECK_FPU_DIRTY || CONFIG_CHECK_VECTOR_DIRTY || CONFIG_CHECK_MATRIX_DIRTY
    RESTORE_MSTATUS
#endif /* CONFIG_CHECK_FPU_DIRTY || CONFIG_CHECK_VECTOR_DIRTY || CONFIG_CHECK_MATRIX_DIRTY */
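    /* pop in the reverse order of SAVE_ALL: matrix, vector, FPU, DSP,
       then the general registers */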
#if (defined(__riscv_matrix) || defined(__riscv_xtheadmatrix)) && defined(ARCH_RISCV_MATRIX)
#if CONFIG_CHECK_MATRIX_DIRTY
    /* if not dirty, skip */
    li t1, SR_MS_DIRTY
    and t4, t3, t1
    bne t4, t1, 1f
#endif
    /* restore matrix registers */
    csrr t0, xmlenb
    slli t1, t0, 3
    csrw xmrstart, x0
    /* whole-matrix load (counterpart of the mst8mb used in SAVE_ALL) */
    mld8mb m0, (sp)
    add sp, sp, t1
    /* restore matrix csr registers: load each saved value and write it
       back (csrw, not csrr, or the saved values would be discarded) */
    LOAD t0, (0)(sp)
    csrw xmrstart, t0
    LOAD t0, (1 * REGBYTES)(sp)
    csrw xmcsr, t0
    LOAD t0, (2 * REGBYTES)(sp)
    csrw xmsize, t0
    addi sp, sp, (CTX_MATRIX_CSR_REG_NR) * REGBYTES
#if CONFIG_CHECK_MATRIX_DIRTY
    j 2f
1:
    /* don't restore, move sp only */
    csrr t0, xmlenb
    slli t1, t0, 3
    add sp, sp, t1
    addi sp, sp, (CTX_MATRIX_CSR_REG_NR) * REGBYTES
2:
#endif
#endif /* __riscv_matrix || __riscv_xtheadmatrix */
#if defined(__riscv_vector) && defined(ARCH_RISCV_VECTOR)
#if CONFIG_CHECK_VECTOR_DIRTY
    /* check whether the VS field of mstatus is 'dirty' */
    li t1, SR_VS_DIRTY
    and t4, t3, t1
    bne t4, t1, 3f
#endif /* CONFIG_CHECK_VECTOR_DIRTY */
    /* restore vector registers */
    csrr t0, vlenb
    slli t0, t0, 3
#if (__riscv_v == 7000)
    vsetvli zero, zero, e8, m8
    vlb.v v0, (sp)
    add sp, sp, t0
    vlb.v v8, (sp)
    add sp, sp, t0
    vlb.v v16, (sp)
    add sp, sp, t0
    vlb.v v24, (sp)
    add sp, sp, t0
#elif (__riscv_v == 1000000)
    vsetvli zero, zero, e8, m8, ta, ma
    vl8r.v v0, (sp)
    add sp, sp, t0
    vl8r.v v8, (sp)
    add sp, sp, t0
    vl8r.v v16, (sp)
    add sp, sp, t0
    vl8r.v v24, (sp)
    add sp, sp, t0
#endif
    /* restore vector csr registers */
    LOAD t0, (0)(sp)
    LOAD t1, (1 * REGBYTES)(sp)
    LOAD t2, (2 * REGBYTES)(sp)
    /* vsetvl restores vl and vtype together */
    vsetvl zero, t0, t1
    csrw vstart, t2
    LOAD t2, (3 * REGBYTES)(sp)
    csrw vxsat, t2
    LOAD t2, (4 * REGBYTES)(sp)
    csrw vxrm, t2
    addi sp, sp, (CTX_VECTOR_CSR_REG_NR) * REGBYTES
#if CONFIG_CHECK_VECTOR_DIRTY
    j 4f
3:
    /* don't restore, move sp only */
    csrr t0, vlenb
    li t1, CTX_VECTOR_REG_NR
    mul t2, t0, t1
    add sp, sp, t2
    addi sp, sp, (CTX_VECTOR_CSR_REG_NR) * REGBYTES
4:
#endif /* CONFIG_CHECK_VECTOR_DIRTY */
#endif /* __riscv_vector && ARCH_RISCV_VECTOR */
#if defined(__riscv_flen) && defined(ARCH_RISCV_FPU)
#if CONFIG_CHECK_FPU_DIRTY
    /* check whether the FS field of mstatus is 'dirty' */
    li t1, SR_FS_DIRTY
    and t4, t3, t1
    bne t4, t1, 5f
#endif
    /* restore fpu registers */
    FLOAD f0, FPU_CTX_F0_OFF(sp)
    FLOAD f1, FPU_CTX_F1_OFF(sp)
    FLOAD f2, FPU_CTX_F2_OFF(sp)
    FLOAD f3, FPU_CTX_F3_OFF(sp)
    FLOAD f4, FPU_CTX_F4_OFF(sp)
    FLOAD f5, FPU_CTX_F5_OFF(sp)
    FLOAD f6, FPU_CTX_F6_OFF(sp)
    FLOAD f7, FPU_CTX_F7_OFF(sp)
    FLOAD f8, FPU_CTX_F8_OFF(sp)
    FLOAD f9, FPU_CTX_F9_OFF(sp)
    FLOAD f10, FPU_CTX_F10_OFF(sp)
    FLOAD f11, FPU_CTX_F11_OFF(sp)
    FLOAD f12, FPU_CTX_F12_OFF(sp)
    FLOAD f13, FPU_CTX_F13_OFF(sp)
    FLOAD f14, FPU_CTX_F14_OFF(sp)
    FLOAD f15, FPU_CTX_F15_OFF(sp)
    FLOAD f16, FPU_CTX_F16_OFF(sp)
    FLOAD f17, FPU_CTX_F17_OFF(sp)
    FLOAD f18, FPU_CTX_F18_OFF(sp)
    FLOAD f19, FPU_CTX_F19_OFF(sp)
    FLOAD f20, FPU_CTX_F20_OFF(sp)
    FLOAD f21, FPU_CTX_F21_OFF(sp)
    FLOAD f22, FPU_CTX_F22_OFF(sp)
    FLOAD f23, FPU_CTX_F23_OFF(sp)
    FLOAD f24, FPU_CTX_F24_OFF(sp)
    FLOAD f25, FPU_CTX_F25_OFF(sp)
    FLOAD f26, FPU_CTX_F26_OFF(sp)
    FLOAD f27, FPU_CTX_F27_OFF(sp)
    FLOAD f28, FPU_CTX_F28_OFF(sp)
    FLOAD f29, FPU_CTX_F29_OFF(sp)
    FLOAD f30, FPU_CTX_F30_OFF(sp)
    FLOAD f31, FPU_CTX_F31_OFF(sp)
    addi sp, sp, CTX_FPU_REG_NR * FREGBYTES
    /* restore fcsr register */
    LOAD t0, 0(sp)
    fscsr t0
    addi sp, sp, CTX_FPU_CSR_REG_NR * REGBYTES
#if CONFIG_CHECK_FPU_DIRTY
    j 6f
5:
    /* don't restore, move sp only */
    addi sp, sp, CTX_FPU_REG_NR * FREGBYTES
    addi sp, sp, CTX_FPU_CSR_REG_NR * REGBYTES
6:
#endif /* CONFIG_CHECK_FPU_DIRTY */
#endif /* __riscv_flen && ARCH_RISCV_FPU */
#if defined(__riscv_dsp) && defined(ARCH_RISCV_DSP)
    LOAD t0, 0(sp)
    csrw vxsat, t0
    addi sp, sp, CTX_DSP_CSR_REG_NR * REGBYTES
#endif
    /* restore general registers */
#ifndef __riscv_32e
    LOAD x1, 31 * REGBYTES(sp)
#else
    LOAD x1, 15 * REGBYTES(sp)
#endif
    csrw mepc, x1
    /* if any dirty check is enabled, mstatus was already restored by
       RESTORE_MSTATUS at the top of this macro */
#if (!CONFIG_CHECK_FPU_DIRTY) && (!CONFIG_CHECK_VECTOR_DIRTY) && (!CONFIG_CHECK_MATRIX_DIRTY)
#ifndef __riscv_32e
    LOAD x1, 32 * REGBYTES(sp)
#else
    LOAD x1, 16 * REGBYTES(sp)
#endif
    csrw mstatus, x1
#endif
    LOAD x1, 0 * REGBYTES(sp)
    LOAD x3, 2 * REGBYTES(sp)
    LOAD x4, 3 * REGBYTES(sp)
    LOAD x5, 4 * REGBYTES(sp)
    LOAD x6, 5 * REGBYTES(sp)
    LOAD x7, 6 * REGBYTES(sp)
    LOAD x8, 7 * REGBYTES(sp)
    LOAD x9, 8 * REGBYTES(sp)
    LOAD x10, 9 * REGBYTES(sp)
    LOAD x11, 10 * REGBYTES(sp)
    LOAD x12, 11 * REGBYTES(sp)
    LOAD x13, 12 * REGBYTES(sp)
    LOAD x14, 13 * REGBYTES(sp)
    LOAD x15, 14 * REGBYTES(sp)
#ifndef __riscv_32e
    LOAD x16, 15 * REGBYTES(sp)
    LOAD x17, 16 * REGBYTES(sp)
    LOAD x18, 17 * REGBYTES(sp)
    LOAD x19, 18 * REGBYTES(sp)
    LOAD x20, 19 * REGBYTES(sp)
    LOAD x21, 20 * REGBYTES(sp)
    LOAD x22, 21 * REGBYTES(sp)
    LOAD x23, 22 * REGBYTES(sp)
    LOAD x24, 23 * REGBYTES(sp)
    LOAD x25, 24 * REGBYTES(sp)
    LOAD x26, 25 * REGBYTES(sp)
    LOAD x27, 26 * REGBYTES(sp)
    LOAD x28, 27 * REGBYTES(sp)
    LOAD x29, 28 * REGBYTES(sp)
    LOAD x30, 29 * REGBYTES(sp)
    LOAD x31, 30 * REGBYTES(sp)
#endif
    /* x2 (sp) is recovered by unwinding the frame itself */
    addi sp, sp, CTX_GENERAL_REG_NR * REGBYTES
.endm
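
/*
 * RESTORE_SYS_GP reloads the global pointer. Relaxation must be disabled
 * around the la: otherwise the linker may relax it into a gp-relative
 * sequence, which cannot work while gp itself is being (re)loaded.
 */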
.macro RESTORE_SYS_GP
    .option push
    .option norelax
    la gp, __global_pointer$
    .option pop
.endm
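
/* mstatus.MIE is bit 3, so the immediate 8 toggles machine interrupt enable */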
.macro OPEN_INTERRUPT
    csrsi mstatus, 8
.endm

.macro CLOSE_INTERRUPT
    csrci mstatus, 8
.endm
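
/*
 * Usage sketch (hypothetical, not part of this header): a machine-mode
 * trap entry would typically bracket its C dispatch with these macros.
 * `trap_entry` and `handle_trap` below are illustrative names only.
 *
 *     trap_entry:
 *         SAVE_ALL
 *         RESTORE_SYS_GP
 *         mv   a0, sp          // pass the saved frame to the handler
 *         call handle_trap     // hypothetical C dispatch routine
 *         RESTORE_ALL
 *         mret
 */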

#endif /* __STACKFRAME_H__ */