/* context_gcc.S — AArch64 context switch and exception entry for RT-Thread */
/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date         Author       Notes
 * 2021-05-18   Jesven       the first version
 * 2023-06-24   WangXiaoyao  Support backtrace for user thread
 * 2024-01-06   Shell        Fix barrier on irq_disable/enable
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifndef RT_USING_SMP
/* UP build only: bookkeeping for a thread switch deferred to IRQ exit
 * (one 8-byte slot each, see rt_hw_context_switch_interrupt / vector_irq). */
.section .data
rt_interrupt_from_thread:        .zero 8  /* address of outgoing thread's sp slot */
rt_interrupt_to_thread:          .zero 8  /* address of incoming thread's sp slot */
rt_thread_switch_interrupt_flag: .zero 8  /* non-zero: a switch is pending */
#endif
.text

/*
 * void rt_hw_cpu_id_set(void)
 * Derive this core's CPU id from its affinity value and cache it in
 * TPIDR_EL1 so rt_hw_cpu_id() can read it back with a single MRS.
 */
.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1           /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8              /* Cortex-A55: core number is in the Aff1 field */
#endif
    and x0, x0, #15             /* keep the low affinity nibble as the CPU id */
    msr tpidr_el1, x0           /* cache in the EL1 software thread-ID register */
    ret
/*
 * int rt_hw_cpu_id(void)
 * Return the CPU id previously cached in TPIDR_EL1 by rt_hw_cpu_id_set().
 */
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 * Write the process/context id into CONTEXTIDR_EL1 (used by debug/trace
 * hardware to tag the current address-space context).
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
  51. /*
  52. *enable gtimer
  53. */
  54. .globl rt_hw_gtimer_enable
  55. rt_hw_gtimer_enable:
  56. MOV X0,#1
  57. MSR CNTP_CTL_EL0,X0
  58. RET
  59. /*
  60. *set gtimer CNTP_TVAL_EL0 value
  61. */
  62. .globl rt_hw_set_gtimer_val
  63. rt_hw_set_gtimer_val:
  64. MSR CNTP_TVAL_EL0,X0
  65. RET
  66. /*
  67. *get gtimer CNTP_TVAL_EL0 value
  68. */
  69. .globl rt_hw_get_gtimer_val
  70. rt_hw_get_gtimer_val:
  71. MRS X0,CNTP_TVAL_EL0
  72. RET
  73. .globl rt_hw_get_cntpct_val
  74. rt_hw_get_cntpct_val:
  75. MRS X0, CNTPCT_EL0
  76. RET
  77. /*
  78. *get gtimer frq value
  79. */
  80. .globl rt_hw_get_gtimer_frq
  81. rt_hw_get_gtimer_frq:
  82. MRS X0,CNTFRQ_EL0
  83. RET
/*
 * Thread entry trampoline.
 * x19/x20 are pre-loaded in the initial context (presumably by the stack
 * init code -- TODO confirm): x19 = thread entry, x20 = thread exit handler.
 */
START_POINT(_thread_start)
    blr x19                     /* run the thread entry function */
    mov x29, #0                 /* clear frame pointer: terminate backtraces here */
    blr x20                     /* entry returned: run the exit/cleanup handler */
    b . /* never here */
START_POINT_END(_thread_start)
/*
 * SAVE_CONTEXT - push a full CPU context frame onto the current stack.
 * Frame layout, top (lowest address) to bottom:
 *   ELR_EL1/SPSR_EL1, SP_EL0/X30, FPCR/FPSR, X28..X1/X0, FPU regs.
 * On exit X0 = SP = pointer to the frame (the "context" handle used by
 * the trap handlers and RESTORE_CONTEXT).
 */
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR               /* X28/X29 already saved: reuse as scratch */
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0             /* user stack pointer of the interrupted context */
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1            /* X2/X3 already saved: reuse as scratch */
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
/*
 * SAVE_CONTEXT_FROM_EL1 - save a context frame for a voluntary (non-trap)
 * switch from EL1 code. Same frame layout as SAVE_CONTEXT, except the
 * ELR slot holds X30 (the caller's return address) and the SPSR slot is
 * synthesized as EL1h with interrupts masked, so RESTORE_CONTEXT's ERET
 * resumes execution right after the call into the switch routine.
 */
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
    MOV X18, X30                /* fake ELR = return address of our caller */
    STP X18, X19, [SP, #-0x10]!
.endm
#ifdef RT_USING_SMP
/*
 * RESTORE_CONTEXT (SMP) - make SP point at the frame in X0, pop a
 * SAVE_CONTEXT frame, and ERET into the restored context.
 * The TST below sets Z when SPSR.M[4:0] == 0 (return to EL0); none of
 * the intervening MSR/LDP instructions modify NZCV, so the BEQ at the
 * bottom still sees that result and routes EL0 returns through
 * arch_ret_to_user.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z = destination is EL0 (consumed by BEQ below) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29         /* restore the user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* EL0 destination: take the user-return path */
#endif
    ERET
.endm
#else
/*
 * RESTORE_CONTEXT (UP) - switch SP to the frame in X0, switch to the
 * target thread's user address space (LWP builds), then pop the frame
 * and ERET. Flags from the TST survive to the BEQ: no instruction in
 * between writes NZCV.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self
    MOV X19, X0             /* X19 (callee-saved) = current thread */
    BL lwp_aspace_switch    /* presumably switches the MMU tables for the thread -- TODO confirm */
    MOV X0, X19
    BL lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z = destination is EL0 (consumed by BEQ below) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29         /* restore the user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* EL0 destination: take the user-return path */
#endif
    ERET
.endm
#endif
/*
 * RESTORE_CONTEXT_WITHOUT_MMU_SWITCH - pop a SAVE_CONTEXT frame from the
 * CURRENT stack (SP already points at it) and ERET. Used when resuming
 * the same thread that was interrupted, so no address-space switch is
 * needed. Flag lifetime: see RESTORE_CONTEXT.
 */
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z = destination is EL0 (consumed by BEQ below) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
/*
 * SAVE_USER_CTX - if the saved SPSR says the exception came from user
 * mode, let the LWP layer record the user thread context (backtrace
 * support). Expects the caller to have pushed X0/X1 at [SP]; they are
 * reloaded from there after the call clobbers them.
 * NOTE(review): this masks SPSR with 0xf while RESTORE_USER_CTX uses
 * 0x1f -- confirm which M-field width is intended.
 */
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf         /* mode field: 0 means exception taken from EL0 */
    CMP X1, XZR
    BNE 1f                  /* came from kernel: nothing to do */
    BL lwp_uthread_ctx_save
    LDP X0, X1, [SP]        /* restore X0/X1 stashed by the caller */
1:
.endm
/*
 * RESTORE_USER_CTX ctx - counterpart of SAVE_USER_CTX: if the frame at
 * \ctx records a return to EL0, let the LWP layer restore the user
 * thread context before we resume it.
 */
.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f        /* SPSR.M[4:0]: 0 = EL0t */
    CMP X1, XZR
    BNE 1f                  /* kernel destination: nothing to do */
    BL lwp_uthread_ctx_restore
1:
.endm
#ifdef RT_USING_SMP
/* SMP: the global IRQ routines below double as the per-CPU local ones. */
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif

.text

/*
 * rt_bool_t rt_hw_interrupt_is_disabled(void)
 * Return non-zero when IRQ and/or FIQ is masked in DAIF.
 */
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    MRS X0, DAIF
    TST X0, #0xc0           /* DAIF.I | DAIF.F */
    CSET X0, NE             /* 1 if either mask bit set, else 0 */
    RET
/*
 * rt_base_t rt_hw_interrupt_disable();
 * Mask IRQ+FIQ on this CPU and return the previous DAIF I/F bits
 * (0xc0 mask) so rt_hw_interrupt_enable() can restore them.
 * Fast path: both already masked, nothing to do.
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    AND X0, X0, #0xc0       /* keep only the I and F mask bits */
    CMP X0, #0xc0
    /* branch if bits not both set(zero) */
    BNE 1f
    RET                     /* already fully masked: return previous state */
1:
    MSR DAIFSet, #3         /* mask IRQ and FIQ */
    DSB NSH                 /* barrier: make the mask effective before returning */
    ISB
    RET
  297. /*
  298. * void rt_hw_interrupt_enable(rt_base_t level);
  299. */
  300. .globl rt_hw_interrupt_enable
  301. rt_hw_interrupt_enable:
  302. AND X0, X0, #0xc0
  303. CMP X0, #0xc0
  304. /* branch if one of the bits not set(zero) */
  305. BNE 1f
  306. RET
  307. 1:
  308. ISB
  309. DSB NSH
  310. AND X0, X0, #0xc0
  311. MRS X1, DAIF
  312. BIC X1, X1, #0xc0
  313. ORR X0, X0, X1
  314. MSR DAIF, X0
  315. RET
.text

#ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (address of to_thread's saved sp slot)
 * X1 --> to_thread
 * First switch on this CPU: there is no outgoing context to save.
 * Does not return to the caller.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]            /* X0 = to_thread's saved stack pointer */
    MOV SP, X0
    MOV X0, X1              /* arg: the incoming thread */
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to,
 *                           struct rt_thread *to_thread);
 * X0 --> from (address of from_thread's saved sp slot)
 * X1 --> to (address of to_thread's saved sp slot)
 * X2 --> to_thread
 * Voluntary switch from EL1; the saved frame resumes after this call.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    MOV SP, X0
    MOV X0, X2              /* arg: the incoming thread */
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 :interrupt context (saved frame of the preempted thread)
 * X1 :addr of from_thread's sp slot
 * X2 :addr of to_thread's sp slot
 * X3 :to_thread's tcb
 * Called on the IRQ-exit path; does not return to the caller.
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!   /* preserve args and LR around the LWP call */
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                /* from_thread->sp = interrupt context */
    LDR X0, [X2]                /* X0 = to_thread's saved stack pointer */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                 /* X19 (callee-saved) keeps to_thread across the call */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/* FIQ is not handled in the SMP build: park the CPU. */
.globl vector_fiq
vector_fiq:
    B .

/*
 * IRQ entry (SMP): save the full context, dispatch to the C handler,
 * then let the scheduler perform any pending thread switch on exit.
 */
START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]          /* reload context pointer clobbered by the call */
#ifdef RT_USING_LWP
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_irq
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch   /* X0 = saved frame; may switch threads */
    B rt_hw_context_switch_exit
START_POINT_END(vector_irq)
/*
 * Common tail for every SMP switch path: the frame to resume is at SP.
 */
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX                   /* drop exclusive-monitor state from the old thread */
    MOV X0, SP              /* X0 = context frame for RESTORE_CONTEXT */
    RESTORE_CONTEXT

#else /* !RT_USING_SMP */
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to (address of the target thread's saved sp slot)
 * First switch at startup: nothing to save. Does not return.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX                   /* drop any stale exclusive-monitor state */
    LDR X0, [X0]            /* X0 = target thread's saved stack pointer */
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from (address of from_thread's saved sp slot)
 * X1 --> to (address of to_thread's saved sp slot)
 * Voluntary switch from EL1; the saved frame resumes after this call.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to,
 *         rt_thread_t from_thread, rt_thread_t to_thread);
 * Record a switch to be completed at IRQ exit (see vector_irq below).
 * Only the first request in a window records `from`; later requests
 * just retarget `to`.
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch          /* a switch is already pending: only update `to` */
    LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!  /* preserve `to` and LR around the call */
#ifdef RT_USING_LWP
    MOV X0, X2              /* arg: the outgoing thread */
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
.text

// -- Exception handlers ----------------------------------

/* FIQ entry (UP): dispatch to the C handler and resume the same context. */
.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* keep the context pointer across the call */
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

/*
 * IRQ entry (UP): dispatch the interrupt, then complete any thread
 * switch that rt_hw_context_switch_interrupt recorded meanwhile.
 */
.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    // if rt_thread_switch_interrupt_flag set, jump to
    // rt_hw_context_switch_interrupt_do and don't return
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit    /* no pending switch: resume interrupted thread */
    MOV X2, #0 // clear flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR x0, [X4] // store sp in preempted tasks's TCB
    LDR x3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR x0, [X4] // get new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    MOV SP, X0              /* same thread: frame is still on this stack */
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif /* RT_USING_SMP */
// -------------------------------------------------

/*
 * Synchronous exception entry: save the context, hand it to the C trap
 * handler (saving/restoring the user context when the fault came from
 * EL0), then resume on the same stack.
 */
START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* keep the context pointer at [SP] */
#ifdef RT_USING_LWP
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_exception     /* X0 = context frame */
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)
  522. START_POINT(vector_serror)
  523. SAVE_CONTEXT
  524. #ifdef RT_USING_LWP
  525. SAVE_USER_CTX
  526. #endif
  527. STP X0, X1, [SP, #-0x10]!
  528. BL rt_hw_trap_serror
  529. b .
  530. START_POINT_END(vector_serror)