/* nlrx64.c — x86-64 implementation of MicroPython's NLR (non-local return) */
  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013-2017 Damien P. George
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a copy
  9. * of this software and associated documentation files (the "Software"), to deal
  10. * in the Software without restriction, including without limitation the rights
  11. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  12. * copies of the Software, and to permit persons to whom the Software is
  13. * furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice shall be included in
  16. * all copies or substantial portions of the Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  20. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  21. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  22. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  23. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  24. * THE SOFTWARE.
  25. */
  26. #include "py/mpstate.h"
  27. #if !MICROPY_NLR_SETJMP && defined(__x86_64__)
  28. #undef nlr_push
  29. // x86-64 callee-save registers are:
  30. // rbx, rbp, rsp, r12, r13, r14, r15
  31. #if defined(_WIN32) || defined(__CYGWIN__)
  32. #define NLR_OS_WINDOWS 1
  33. #else
  34. #define NLR_OS_WINDOWS 0
  35. #endif
  36. __attribute__((used)) unsigned int nlr_push_tail(nlr_buf_t *nlr);
// Save the current machine context (return address, stack pointer and all
// callee-save registers) into *nlr, then tail-call nlr_push_tail to link the
// buffer into the thread's NLR chain.  Returns 0 on the initial call; control
// re-arrives here with a non-zero return value when nlr_jump unwinds to this
// buffer.
// NOTE(review): the 16..88 byte offsets assume nlr_buf_t starts with two
// pointer-sized fields (prev, ret_val — as used by nlr_push_tail/nlr_jump
// below) followed by the register save area — confirm against nlr.h.
unsigned int nlr_push(nlr_buf_t *nlr) {
    (void)nlr;
    #if NLR_OS_WINDOWS
    // Windows x64 ABI: first argument (nlr) arrives in %rcx, and %rdi/%rsi
    // are callee-save, so they are stored as well.
    __asm volatile (
    "movq (%rsp), %rax \n" // load return %rip
    "movq %rax, 16(%rcx) \n" // store %rip into nlr_buf
    "movq %rbp, 24(%rcx) \n" // store %rbp into nlr_buf
    "movq %rsp, 32(%rcx) \n" // store %rsp into nlr_buf
    "movq %rbx, 40(%rcx) \n" // store %rbx into nlr_buf
    "movq %r12, 48(%rcx) \n" // store %r12 into nlr_buf
    "movq %r13, 56(%rcx) \n" // store %r13 into nlr_buf
    "movq %r14, 64(%rcx) \n" // store %r14 into nlr_buf
    "movq %r15, 72(%rcx) \n" // store %r15 into nlr_buf
    "movq %rdi, 80(%rcx) \n" // store %rdi into nlr_buf
    "movq %rsi, 88(%rcx) \n" // store %rsi into nlr_buf
    "jmp nlr_push_tail \n" // do the rest in C
    );
    #else
    // System V AMD64 ABI: first argument (nlr) arrives in %rdi.
    __asm volatile (
    #if defined(__APPLE__) || defined(__MACH__)
    // NOTE(review): assumes the compiler emitted a "push %rbp" frame-pointer
    // prologue on Darwin; pop it so %rsp points at the return address again
    // — confirm this holds for the build's optimization settings.
    "pop %rbp \n" // undo function's prelude
    #endif
    "movq (%rsp), %rax \n" // load return %rip
    "movq %rax, 16(%rdi) \n" // store %rip into nlr_buf
    "movq %rbp, 24(%rdi) \n" // store %rbp into nlr_buf
    "movq %rsp, 32(%rdi) \n" // store %rsp into nlr_buf
    "movq %rbx, 40(%rdi) \n" // store %rbx into nlr_buf
    "movq %r12, 48(%rdi) \n" // store %r12 into nlr_buf
    "movq %r13, 56(%rdi) \n" // store %r13 into nlr_buf
    "movq %r14, 64(%rdi) \n" // store %r14 into nlr_buf
    "movq %r15, 72(%rdi) \n" // store %r15 into nlr_buf
    #if defined(__APPLE__) || defined(__MACH__)
    // Mach-O C symbols carry a leading underscore.
    "jmp _nlr_push_tail \n" // do the rest in C
    #else
    "jmp nlr_push_tail \n" // do the rest in C
    #endif
    );
    #endif
    return 0; // needed to silence compiler warning
}
  77. __attribute__((used)) unsigned int nlr_push_tail(nlr_buf_t *nlr) {
  78. nlr_buf_t **top = &MP_STATE_THREAD(nlr_top);
  79. nlr->prev = *top;
  80. *top = nlr;
  81. return 0; // normal return
  82. }
  83. void nlr_pop(void) {
  84. nlr_buf_t **top = &MP_STATE_THREAD(nlr_top);
  85. *top = (*top)->prev;
  86. }
// Perform a non-local return: store val into the top NLR buffer, unlink it
// from the chain, then restore the machine context saved by nlr_push so that
// the matching nlr_push call returns again — this time with value 1.
// If no buffer has been pushed, delegates to nlr_jump_fail (must not return).
NORETURN void nlr_jump(void *val) {
    nlr_buf_t **top_ptr = &MP_STATE_THREAD(nlr_top);
    nlr_buf_t *top = *top_ptr;
    if (top == NULL) {
        nlr_jump_fail(val);
    }
    top->ret_val = val; // retrieved by the caller of nlr_push via nlr->ret_val
    *top_ptr = top->prev; // pop the buffer before jumping back
    // Restore the registers in the reverse order they were saved; %rsp is
    // switched to the saved stack, the saved %rip is placed at its top, and
    // "ret" transfers control back into nlr_push's caller frame.
    // NOTE(review): the empty clobber list is tolerable only because this
    // asm never falls through to compiler-generated code — confirm.
    __asm volatile (
    "movq %0, %%rcx \n" // %rcx points to nlr_buf
    #if NLR_OS_WINDOWS
    // %rdi/%rsi are callee-save in the Windows x64 ABI, so restore them too.
    "movq 88(%%rcx), %%rsi \n" // load saved %rsi
    "movq 80(%%rcx), %%rdi \n" // load saved %rdi
    #endif
    "movq 72(%%rcx), %%r15 \n" // load saved %r15
    "movq 64(%%rcx), %%r14 \n" // load saved %r14
    "movq 56(%%rcx), %%r13 \n" // load saved %r13
    "movq 48(%%rcx), %%r12 \n" // load saved %r12
    "movq 40(%%rcx), %%rbx \n" // load saved %rbx
    "movq 32(%%rcx), %%rsp \n" // load saved %rsp
    "movq 24(%%rcx), %%rbp \n" // load saved %rbp
    "movq 16(%%rcx), %%rax \n" // load saved %rip
    "movq %%rax, (%%rsp) \n" // store saved %rip to stack
    "xorq %%rax, %%rax \n" // clear return register
    "inc %%al \n" // increase to make 1, non-local return
    "ret \n" // return
    : // output operands
    : "r"(top) // input operands
    : // clobbered registers
    );
    for (;;); // needed to silence compiler warning
}
  119. #endif // !MICROPY_NLR_SETJMP && defined(__x86_64__)