/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
 * 2024-04-08     Shell        Optimizing exception switch between u-space/kernel
 */
#ifndef __ARM64_INC_VECTOR_H__
#define __ARM64_INC_VECTOR_H__

#include "asm-generic.h"

#include <rtconfig.h>
#include <asm-fpu.h>
#include <armv8.h>
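
/*
 * Push a full exception frame onto the current stack: the FPU/NEON registers
 * (via SAVE_FPU), x0-x29, fpcr/fpsr, sp_el0/x30 and finally elr_el1/spsr_el1,
 * leaving sp pointing at the saved elr_el1/spsr_el1 pair.
 */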
.macro SAVE_IRQ_CONTEXT
    /* Save the entire context. */
    SAVE_FPU sp
    stp     x0, x1, [sp, #-0x10]!
    stp     x2, x3, [sp, #-0x10]!
    stp     x4, x5, [sp, #-0x10]!
    stp     x6, x7, [sp, #-0x10]!
    stp     x8, x9, [sp, #-0x10]!
    stp     x10, x11, [sp, #-0x10]!
    stp     x12, x13, [sp, #-0x10]!
    stp     x14, x15, [sp, #-0x10]!
    stp     x16, x17, [sp, #-0x10]!
    stp     x18, x19, [sp, #-0x10]!
    stp     x20, x21, [sp, #-0x10]!
    stp     x22, x23, [sp, #-0x10]!
    stp     x24, x25, [sp, #-0x10]!
    stp     x26, x27, [sp, #-0x10]!
    stp     x28, x29, [sp, #-0x10]!

    /* Save the FPU control and status registers. */
    mrs     x28, fpcr
    mrs     x29, fpsr
    stp     x28, x29, [sp, #-0x10]!

    /* Save the user stack pointer and the link register. */
    mrs     x29, sp_el0
    stp     x29, x30, [sp, #-0x10]!

    /* Save the exception return address and the saved program status. */
    mrs     x3, spsr_el1
    mrs     x2, elr_el1
    stp     x2, x3, [sp, #-0x10]!
.endm
#ifdef RT_USING_SMP
#include "../mp/context_gcc.h"
#else
#include "../up/context_gcc.h"
#endif
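
/*
 * Unwind an exception frame built by SAVE_IRQ_CONTEXT, but deliberately skip
 * reloading sp_el0: its saved value is popped into x29 and then overwritten
 * when the real x28/x29 pair is restored, while x30 is restored normally.
 */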
.macro RESTORE_IRQ_CONTEXT_NO_SPEL0
    /* Restore the exception return address and the saved program status. */
    ldp     x2, x3, [sp], #0x10
    msr     elr_el1, x2
    msr     spsr_el1, x3

    /* Pop the saved sp_el0 (discarded) and restore x30. */
    ldp     x29, x30, [sp], #0x10

    /* Restore the FPU control and status registers. */
    ldp     x28, x29, [sp], #0x10
    msr     fpcr, x28
    msr     fpsr, x29

    /* Restore the general-purpose registers. */
    ldp     x28, x29, [sp], #0x10
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    ldp     x6, x7, [sp], #0x10
    ldp     x4, x5, [sp], #0x10
    ldp     x2, x3, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    RESTORE_FPU sp
.endm
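
/*
 * Inspect the saved SPSR_EL1 in the exception frame \eframex; when the
 * exception was taken from EL0, leave through arch_ret_to_user instead of
 * falling through to the normal kernel return path (RT_USING_SMART only).
 */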
.macro EXCEPTION_SWITCH, eframex, tmpx
#ifdef RT_USING_SMART
    /*
     * Test the saved SPSR_EL1 for exception level 0:
     * M[4:0] == 0b00000 (EL0t) means the exception came from user space.
     */
    ldr     \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
    and     \tmpx, \tmpx, 0x1f
    cbz     \tmpx, 1f
    b       2f
1:
    /* Taken from EL0: return to user space. */
    b       arch_ret_to_user
2:
#endif /* RT_USING_SMART */
.endm
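
/*
 * When the exception was taken from EL0, hand the exception frame \eframex
 * to lwp_uthread_ctx_save so the user thread context is recorded
 * (RT_USING_SMART only).
 */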
.macro SAVE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
    /* Check the current SPSR_EL1 mode bits for EL0. */
    mrs     \tmpx, spsr_el1
    and     \tmpx, \tmpx, 0xf
    cbz     \tmpx, 1f
    b       2f
1:
    /* lwp_uthread_ctx_save() expects the exception frame pointer in x0. */
    mov     x0, \eframex
    bl      lwp_uthread_ctx_save
2:
#endif /* RT_USING_SMART */
.endm
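
/*
 * Counterpart of SAVE_USER_CTX: when the saved SPSR_EL1 in \eframex shows the
 * exception came from EL0, call lwp_uthread_ctx_restore before returning
 * (RT_USING_SMART only).
 */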
.macro RESTORE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
    /* Check the saved SPSR_EL1 mode bits in the exception frame for EL0. */
    ldr     \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
    and     \tmpx, \tmpx, 0x1f
    cbz     \tmpx, 1f
    b       2f
1:
    bl      lwp_uthread_ctx_restore
2:
#endif /* RT_USING_SMART */
.endm
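
/*
 * Illustrative usage sketch (hypothetical entry stub; the real vector entries
 * live in vector_gcc.S and the mp/up context_gcc.h headers and differ in
 * detail -- `handle_irq_example` is a placeholder name):
 *
 *     vector_irq_example:
 *         SAVE_IRQ_CONTEXT                 // build the exception frame; sp now
 *                                          // points at the saved elr/spsr pair
 *         bl      handle_irq_example       // placeholder C-level handler
 *         EXCEPTION_SWITCH sp, x1          // taken from EL0? -> arch_ret_to_user
 *         RESTORE_IRQ_CONTEXT_NO_SPEL0     // otherwise unwind the kernel frame
 *         eret
 */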
#endif /* __ARM64_INC_VECTOR_H__ */