context_gcc.S

/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 */

#include "rtconfig.h"
#include "asm-fpu.h"

.text

.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
    and x0, x0, #15
    msr tpidr_el1, x0
    ret

/*
 * int rt_hw_cpu_id(void)
 */
.global rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1       /* read the core id cached in TPIDR_EL1 by rt_hw_cpu_id_set */
    ret
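
/*
 * Note: rt_hw_cpu_id_set derives the core number from the low affinity
 * bits of MPIDR_EL1 and caches it in TPIDR_EL1, so rt_hw_cpu_id is a
 * single register read on every call. This assumes that the lower four
 * affinity bits alone identify the core on the target SoC.
 */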

/*
 * void rt_hw_set_process_id(size_t id)
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
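
/*
 * The routines below program the EL1 physical timer through its
 * EL0-accessible system registers:
 *   CNTP_CTL_EL0  - control register (bit 0 enables the timer)
 *   CNTP_TVAL_EL0 - timer value, counts down and fires at zero
 *   CNTPCT_EL0    - current physical counter value
 *   CNTFRQ_EL0    - counter frequency in Hz
 *
 * A typical tick setup from C might look like this (sketch only; the
 * TICK_HZ constant is hypothetical, the prototypes follow the labels
 * below):
 *
 *     rt_hw_set_gtimer_val(rt_hw_get_gtimer_frq() / TICK_HZ);
 *     rt_hw_gtimer_enable();
 */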

/*
 * enable gtimer
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1
    MSR CNTP_CTL_EL0, X0
    RET

/*
 * set gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET

/*
 * get gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET

.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET

/*
 * get gtimer frq value
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
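
/*
 * Note on the context frame built by the macros below: SAVE_CONTEXT
 * pushes the FPU state (SAVE_FPU), X0-X29, FPCR/FPSR, SP_EL0 and X30,
 * and finally ELR_EL1/SPSR_EL1 onto the current stack, then leaves the
 * resulting stack pointer in X0 so the caller can store it in the
 * thread control block. The RESTORE_CONTEXT* macros unwind this frame
 * in the exact reverse order.
 */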

.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!

    MOV X0, SP              /* Move SP into X0 for saving. */
.endm
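
/*
 * SAVE_CONTEXT_FROM_EL1 builds the same frame for a cooperative switch
 * made from EL1 (no exception was taken): it synthesizes the SPSR value
 * ((3 << 6) | 0x4 | 0x1 = IRQ/FIQ masked, EL1h) and uses the caller's
 * X30 as the ELR, so a later RESTORE_CONTEXT "returns" straight to the
 * caller of rt_hw_context_switch with IRQ/FIQ still masked.
 */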

.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!

    MOV X19, #((3 << 6) | 0x4 | 0x1)    /* el1h, disable interrupt */
    MOV X18, X30
    STP X18, X19, [SP, #-0x10]!
.endm
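
/*
 * In each RESTORE_CONTEXT variant below, "TST X3, #0x1f" checks the
 * mode field of the saved SPSR: zero means the frame was taken from
 * EL0 (user mode). With RT_USING_LWP the BEQ after RESTORE_FPU then
 * goes through arch_ret_to_user instead of falling through to ERET.
 */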

#ifdef RT_USING_SMP
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0

    LDP X2, X3, [SP], #0x10     /* SPSR and ELR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm

#else

.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL  rt_thread_self
    MOV X19, X0
    BL  lwp_aspace_switch
    MOV X0, X19
    BL  lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10     /* SPSR and ELR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
#endif

.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10     /* SPSR and ELR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable  rt_hw_local_irq_enable
#endif
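
/*
 * Interrupt masking works on the PSTATE.DAIF bits: I (IRQ) and F (FIQ)
 * are bits 7 and 6, hence the #0xc0 mask below. "MSR DAIFSet, #3" sets
 * both mask bits; rt_hw_interrupt_enable restores only the I/F bits of
 * the previously saved level and leaves the other DAIF bits untouched.
 */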

.text

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    MSR DAIFSet, #3
    DSB SY
    RET

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    DSB SY
    AND X0, X0, #0xc0
    MRS X1, DAIF
    BIC X1, X1, #0xc0
    ORR X0, X0, X1
    MSR DAIF, X0
    RET
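
/*
 * Typical usage from C (sketch only, assuming the prototypes shown in
 * the comments above):
 *
 *     rt_base_t level = rt_hw_interrupt_disable();
 *     ... critical section ...
 *     rt_hw_interrupt_enable(level);
 */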

.text

#ifdef RT_USING_SMP

/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    MOV SP, X0
    MOV X0, X1
    BL  rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL  rt_thread_self
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    MOV SP, X0
    MOV X0, X2
    BL  rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL  rt_thread_self
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 : interrupt context
 * X1 : addr of from_thread's sp
 * X2 : addr of to_thread's sp
 * X3 : to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL  rt_thread_self
    BL  lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]            // store the interrupt context (sp) in from_thread's TCB
    LDR X0, [X2]            // get to_thread's stack pointer
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0
    BL  rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit

.globl vector_fiq
vector_fiq:
    B   .
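
/*
 * IRQ entry for the SMP build: the full context is saved, the RT-Thread
 * interrupt hooks and rt_hw_trap_irq run, and rt_scheduler_do_irq_switch
 * is given the saved frame (X0) so a pending switch can be taken on the
 * way out. Execution then continues in rt_hw_context_switch_exit, which
 * restores a context from the current SP.
 */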

.globl vector_irq
vector_irq:
    CLREX
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */

    BL  rt_interrupt_enter
    BL  rt_hw_trap_irq
    BL  rt_interrupt_leave

    LDP X0, X1, [SP], #0x10

    BL  rt_scheduler_do_irq_switch
    B   rt_hw_context_switch_exit

.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    MOV X0, SP
    RESTORE_CONTEXT

#else

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch

    LDR X4, =rt_interrupt_from_thread   // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                          // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]

    STP X1, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    MOV X0, X2
    BL  lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread     // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
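
/*
 * Single-core build: rt_hw_context_switch_interrupt does not switch
 * immediately. It only records the from/to stack pointer locations and
 * raises rt_thread_switch_interrupt_flag; the actual switch is performed
 * at the tail of vector_irq once interrupt processing has finished.
 */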

.text

// -- Exception handlers ----------------------------------

.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL  rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */

    BL  rt_interrupt_enter
    BL  rt_hw_trap_irq
    BL  rt_interrupt_leave

    LDP X0, X1, [SP], #0x10

    // if rt_thread_switch_interrupt_flag is set, perform the pending
    // context switch here and do not return to the preempted thread
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit

    MOV X2, #0                  // clear the flag
    STR X2, [X1]

    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR X0, [X4]                // store sp in preempted task's TCB

    LDR X3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR X0, [X4]                // get new task's stack pointer

    RESTORE_CONTEXT

vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif
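
/*
 * vector_exception and vector_serror are shared by the SMP and
 * single-core builds. A synchronous exception may return through the
 * restored context after rt_hw_trap_exception; an SError is treated as
 * fatal and the handler parks the core after rt_hw_trap_serror.
 */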

// -------------------------------------------------

.globl vector_exception
vector_exception:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL  rt_hw_trap_exception
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH

.globl vector_serror
vector_serror:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL  rt_hw_trap_serror
    B   .