/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
 */

#ifndef __ARM64_INC_VECTOR_H__
#define __ARM64_INC_VECTOR_H__

#include "asm-generic.h"

#include <rtconfig.h>
#include <asm-fpu.h>
#include <armv8.h>
  16. .macro SAVE_IRQ_CONTEXT
  17. /* Save the entire context. */
  18. SAVE_FPU sp
  19. stp x0, x1, [sp, #-0x10]!
  20. stp x2, x3, [sp, #-0x10]!
  21. stp x4, x5, [sp, #-0x10]!
  22. stp x6, x7, [sp, #-0x10]!
  23. stp x8, x9, [sp, #-0x10]!
  24. stp x10, x11, [sp, #-0x10]!
  25. stp x12, x13, [sp, #-0x10]!
  26. stp x14, x15, [sp, #-0x10]!
  27. stp x16, x17, [sp, #-0x10]!
  28. stp x18, x19, [sp, #-0x10]!
  29. stp x20, x21, [sp, #-0x10]!
  30. stp x22, x23, [sp, #-0x10]!
  31. stp x24, x25, [sp, #-0x10]!
  32. stp x26, x27, [sp, #-0x10]!
  33. stp x28, x29, [sp, #-0x10]!
  34. mrs x28, fpcr
  35. mrs x29, fpsr
  36. stp x28, x29, [sp, #-0x10]!
  37. mrs x29, sp_el0
  38. stp x29, x30, [sp, #-0x10]!
  39. mrs x3, spsr_el1
  40. mrs x2, elr_el1
  41. stp x2, x3, [sp, #-0x10]!
  42. mov x0, sp /* Move SP into X0 for saving. */
  43. .endm
#ifdef RT_USING_SMP
#include "../mp/context_gcc.h"
#else
#include "../up/context_gcc.h"
#endif
  49. .macro RESTORE_IRQ_CONTEXT_WITHOUT_MMU_SWITCH
  50. /* the SP is already ok */
  51. ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */
  52. tst x3, #0x1f
  53. msr spsr_el1, x3
  54. msr elr_el1, x2
  55. ldp x29, x30, [sp], #0x10
  56. msr sp_el0, x29
  57. ldp x28, x29, [sp], #0x10
  58. msr fpcr, x28
  59. msr fpsr, x29
  60. ldp x28, x29, [sp], #0x10
  61. ldp x26, x27, [sp], #0x10
  62. ldp x24, x25, [sp], #0x10
  63. ldp x22, x23, [sp], #0x10
  64. ldp x20, x21, [sp], #0x10
  65. ldp x18, x19, [sp], #0x10
  66. ldp x16, x17, [sp], #0x10
  67. ldp x14, x15, [sp], #0x10
  68. ldp x12, x13, [sp], #0x10
  69. ldp x10, x11, [sp], #0x10
  70. ldp x8, x9, [sp], #0x10
  71. ldp x6, x7, [sp], #0x10
  72. ldp x4, x5, [sp], #0x10
  73. ldp x2, x3, [sp], #0x10
  74. ldp x0, x1, [sp], #0x10
  75. RESTORE_FPU sp
  76. #ifdef RT_USING_SMART
  77. beq arch_ret_to_user
  78. #endif
  79. eret
  80. .endm
  81. .macro SAVE_USER_CTX
  82. mrs x1, spsr_el1
  83. and x1, x1, 0xf
  84. cmp x1, xzr
  85. bne 1f
  86. bl lwp_uthread_ctx_save
  87. ldp x0, x1, [sp]
  88. 1:
  89. .endm
  90. .macro RESTORE_USER_CTX, ctx
  91. ldr x1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
  92. and x1, x1, 0x1f
  93. cmp x1, xzr
  94. bne 1f
  95. bl lwp_uthread_ctx_restore
  96. 1:
  97. .endm
#endif /* __ARM64_INC_VECTOR_H__ */