context_gcc.S
/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     Shell        Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "context_gcc.h"
#include "../include/vector_gcc.h"

#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
.section .text

.globl rt_hw_context_switch_to
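
/*
 * update_tidr publishes the incoming thread pointer for rt_thread_self():
 * with ARCH_USING_HW_THREAD_SELF enabled, the pointer is kept in a dedicated
 * thread-ID system register (ARM64_THREAD_REG, provided by the port headers,
 * typically tpidr_el1), so reading it back needs no per-CPU table lookup.
 */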
.macro update_tidr, srcx
#ifdef ARCH_USING_HW_THREAD_SELF
    msr     ARM64_THREAD_REG, \srcx
#endif /* ARCH_USING_HW_THREAD_SELF */
.endm

/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (pointer to to_thread's saved stack pointer)
 * X1 --> to_thread
 */
rt_hw_context_switch_to:
    /* fetch the stack pointer saved in the thread's control block */
    ldr     x0, [x0]
    mov     sp, x0
    update_tidr x1

    /* preserve to_thread in a callee-saved register across the calls below */
    mov     x19, x1

    mov     x0, x19
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    mov     x0, x19
    bl      lwp_user_setting_restore
#endif
    b       _context_switch_exit
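
/*
 * Illustrative call site, as a sketch (the caller below is an assumption,
 * not part of this file): the scheduler starts the first thread on a CPU
 * roughly as
 *
 *     rt_hw_context_switch_to((rt_ubase_t)&to_thread->sp, to_thread);
 *
 * X0 carries the address of the sp field, which is why the code above
 * dereferences it before installing SP.
 */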

.globl rt_hw_context_switch

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to,
 *                           struct rt_thread *to_thread);
 * X0 --> from (pointer to from_thread's saved stack pointer)
 * X1 --> to (pointer to to_thread's saved stack pointer)
 * X2 --> to_thread
 */
rt_hw_context_switch:
    SAVE_CONTEXT_SWITCH x19, x20

    mov     x3, sp
    str     x3, [x0]    // store sp in the preempted task's TCB
    ldr     x0, [x1]    // get the new task's stack pointer
    mov     sp, x0
    update_tidr x2

    /* preserve to_thread in a callee-saved register */
    mov     x19, x2

    mov     x0, x19
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    mov     x0, x19
    bl      lwp_user_setting_restore
#endif
    b       _context_switch_exit
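
/*
 * How the switch works, roughly: SAVE_CONTEXT_SWITCH pushes the callee-saved
 * context onto the from-thread's stack, SP is swapped through the two TCB
 * fields, and _context_switch_exit pops the to-thread's saved context, so to
 * the resumed thread the whole switch looks like an ordinary function return.
 */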

.globl rt_hw_irq_exit
.globl rt_hw_context_switch_interrupt

#define EXP_FRAME   x19
#define FROM_SPP    x20
#define TO_SPP      x21
#define TO_TCB      x22
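
/*
 * x19-x22 are callee-saved under AAPCS64, so these values survive the bl
 * calls below; the aliases just name the role each register plays here.
 */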
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 */
rt_hw_context_switch_interrupt:
#ifdef RT_USING_DEBUG
    /* debug frame for backtrace */
    stp     x29, x30, [sp, #-0x10]!
#endif /* RT_USING_DEBUG */

    /* we can discard all the previous ABI here */
    mov     EXP_FRAME, x0
    mov     FROM_SPP, x1
    mov     TO_SPP, x2
    mov     TO_TCB, x3

#ifdef RT_USING_SMART
    GET_THREAD_SELF x0
    bl      lwp_user_setting_save
#endif /* RT_USING_SMART */

    /* reset SP of from-thread to the exception frame */
    mov     sp, EXP_FRAME

    /* push context for switch; the from-thread will resume at rt_hw_irq_exit */
    adr     lr, rt_hw_irq_exit
    SAVE_CONTEXT_SWITCH_FAST

    /* save SP of from-thread */
    mov     x0, sp
    str     x0, [FROM_SPP]

    /* setup SP to to-thread's */
    ldr     x0, [TO_SPP]
    mov     sp, x0
    update_tidr TO_TCB

    mov     x0, TO_TCB
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    mov     x0, TO_TCB
    bl      lwp_user_setting_restore
#endif /* RT_USING_SMART */
    b       _context_switch_exit
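
/*
 * Note on "adr lr, rt_hw_irq_exit" above: the link register is pointed at
 * rt_hw_irq_exit before the fast context is pushed, so when this from-thread
 * is switched back in later, the restore path "returns" into the IRQ exit
 * code, which then unwinds the full exception frame still sitting on the
 * thread's stack.
 */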

_context_switch_exit:
    .local _context_switch_exit

    /* drop any outstanding load-exclusive reservation so it cannot leak
     * into the resumed context */
    clrex
    RESTORE_CONTEXT_SWITCH