atomic_aarch64.c

/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-05-18     GuEe-GUI     first version
 */

#include <rtatomic.h>
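
/*
 * All routines below operate on the 32-bit (%w) register views, so only the
 * low 32 bits of *ptr take part in each access; if rt_atomic_t is wider than
 * 32 bits on a given build, the upper bits are untouched. The read-modify-write
 * routines use the classic AArch64 LL/SC pattern: an ldxr/stlxr retry loop
 * followed by "dmb ish", a full barrier over the inner shareable domain.
 */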

rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t ret;

    __asm__ volatile (
        "   ldr %w0, %1\n"
        "   dmb ish"
        : "=r" (ret)
        : "Q" (*ptr)
        : "memory");

    return ret;
}
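
/*
 * stlr is a store-release: every earlier access is ordered before it. The
 * trailing "dmb ish" also keeps later accesses from moving above the store.
 */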
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    __asm__ volatile (
        "   stlr %w1, %0\n"
        "   dmb ish"
        : "=Q" (*ptr)
        : "r" (val)
        : "memory");
}
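
/*
 * Generates rt_hw_atomic_<op>(): prfm pstl1strm prefetches the line for
 * store, ldxr/stlxr form the exclusive retry loop, and the value read
 * before the update is returned (fetch-and-op semantics).
 */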
#define AARCH64_ATOMIC_OP_RETURN(op, ins, constraint)                        \
rt_atomic_t rt_hw_atomic_##op(volatile rt_atomic_t *ptr, rt_atomic_t in_val) \
{                                                                            \
    rt_atomic_t tmp, val, result;                                            \
                                                                             \
    __asm__ volatile (                                                       \
        "   prfm pstl1strm, %3\n"                                            \
        "1: ldxr %w0, %3\n"                                                  \
        "   " #ins " %w1, %w0, %w4\n"                                        \
        "   stlxr %w2, %w1, %3\n"                                            \
        "   cbnz %w2, 1b\n"                                                  \
        "   dmb ish"                                                         \
        : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Q" (*ptr)              \
        : __RT_STRINGIFY(constraint) "r" (in_val)                            \
        : "memory");                                                         \
                                                                             \
    return result;                                                           \
}
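
/*
 * The constraint letter bounds the immediate form GCC may pick for in_val:
 * "I" matches ADD immediates, "J" SUB immediates, "K" 32-bit logical
 * immediates. __RT_STRINGIFY() splices the letter in front of "r", so a
 * plain register is always an acceptable fallback.
 */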
AARCH64_ATOMIC_OP_RETURN(add, add, I)
AARCH64_ATOMIC_OP_RETURN(sub, sub, J)
AARCH64_ATOMIC_OP_RETURN(and, and, K)
AARCH64_ATOMIC_OP_RETURN(or, orr, K)
AARCH64_ATOMIC_OP_RETURN(xor, eor, K)
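
/*
 * Same exclusive retry loop with no ALU step: val is swapped in
 * unconditionally and the previous contents are returned.
 */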
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t ret, tmp;

    __asm__ volatile (
        "   prfm pstl1strm, %2\n"
        "1: ldxr %w0, %2\n"
        "   stlxr %w1, %w3, %2\n"
        "   cbnz %w1, 1b\n"
        "   dmb ish"
        : "=&r" (ret), "=&r" (tmp), "+Q" (*ptr)
        : "r" (val)
        : "memory");

    return ret;
}
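
/*
 * Flag helpers: clearing ANDs the word with 0, i.e. atomically stores 0;
 * test-and-set ORs in 1 and returns the previous value, so a nonzero
 * result means the flag was already held.
 */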
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    rt_hw_atomic_and(ptr, 0);
}

rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    return rt_hw_atomic_or(ptr, 1);
}
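
/*
 * Usage sketch: a minimal test-and-set spin lock built on the two flag
 * helpers above. The example_* names are illustrative only, not part of
 * the RT-Thread API.
 */
#if 0 /* illustration only */
static void example_spin_lock(volatile rt_atomic_t *lock)
{
    /* spin until the previous flag value was 0, i.e. the lock was free */
    while (rt_hw_atomic_flag_test_and_set(lock))
    {
    }
}

static void example_spin_unlock(volatile rt_atomic_t *lock)
{
    rt_hw_atomic_flag_clear(lock);
}
#endif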

/*
 * Compare-and-swap: the swap to new is attempted only while the loaded
 * value equals *old, and the loaded value itself is returned. Note that
 * *old is only read here; unlike the C11 API, it is not updated with the
 * observed value on failure, and no barrier is issued on that path.
 */
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t tmp, oldval;

    __asm__ volatile (
        "   prfm pstl1strm, %2\n"
        "1: ldxr %w0, %2\n"
        "   eor %w1, %w0, %w3\n"
        "   cbnz %w1, 2f\n"
        "   stlxr %w1, %w4, %2\n"
        "   cbnz %w1, 1b\n"
        "   dmb ish\n"
        "2:"
        : "=&r" (oldval), "=&r" (tmp), "+Q" (*ptr)
        : "Kr" (*old), "r" (new)
        : "memory");

    return oldval;
}
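
/*
 * Usage sketch: a retry loop as a caller might write it, given that the
 * routine above returns the observed value rather than a success flag.
 * The example_* name is illustrative only.
 */
#if 0 /* illustration only */
static void example_bounded_inc(volatile rt_atomic_t *ctr, rt_atomic_t max)
{
    rt_atomic_t seen = rt_hw_atomic_load(ctr);
    rt_atomic_t expect;

    do
    {
        if (seen >= max)
        {
            return; /* already at the bound, nothing to do */
        }
        expect = seen;
        /* succeeds iff the returned value equals the expected one */
        seen = rt_hw_atomic_compare_exchange_strong(ctr, &expect, seen + 1);
    } while (seen != expect);
}
#endif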