// emitinlinethumb.c
/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
  26. #include <stdint.h>
  27. #include <stdio.h>
  28. #include <string.h>
  29. #include <stdarg.h>
  30. #include <assert.h>
  31. #include "py/emit.h"
  32. #include "py/asmthumb.h"
  33. #if MICROPY_EMIT_INLINE_THUMB
// Enumeration of parse-node kinds, generated from py/grammar.h.  The grammar
// header is included twice with different DEF_RULE/DEF_RULE_NC expansions so
// that rules with a compile function and rules without one occupy two
// disjoint, contiguous ranges of the enum (PN_const_object sits between them).
typedef enum {
// define rules with a compile function
#define DEF_RULE(rule, comp, kind, ...) PN_##rule,
#define DEF_RULE_NC(rule, kind, ...)
#include "py/grammar.h"
#undef DEF_RULE
#undef DEF_RULE_NC
    PN_const_object, // special node for a constant, generic Python object
// define rules without a compile function
#define DEF_RULE(rule, comp, kind, ...)
#define DEF_RULE_NC(rule, kind, ...) PN_##rule,
#include "py/grammar.h"
#undef DEF_RULE
#undef DEF_RULE_NC
} pn_kind_t;
// State of the inline Thumb assembler emitter.
struct _emit_inline_asm_t {
    asm_thumb_t as;           // underlying Thumb machine-code assembler
    uint16_t pass;            // current compiler pass (a pass_kind_t value)
    mp_obj_t *error_slot;     // where a SyntaxError object is stored on failure
    mp_uint_t max_num_labels; // capacity of label_lookup
    qstr *label_lookup;       // label name (qstr) indexed by label number
};
// Record a SyntaxError with the given fixed message in the caller-provided
// error slot; compilation continues but the error is raised later.
STATIC void emit_inline_thumb_error_msg(emit_inline_asm_t *emit, mp_rom_error_text_t msg) {
    *emit->error_slot = mp_obj_new_exception_msg(&mp_type_SyntaxError, msg);
}
// Record an already-constructed exception object in the error slot.
STATIC void emit_inline_thumb_error_exc(emit_inline_asm_t *emit, mp_obj_t exc) {
    *emit->error_slot = exc;
}
  62. emit_inline_asm_t *emit_inline_thumb_new(mp_uint_t max_num_labels) {
  63. emit_inline_asm_t *emit = m_new_obj(emit_inline_asm_t);
  64. memset(&emit->as, 0, sizeof(emit->as));
  65. mp_asm_base_init(&emit->as.base, max_num_labels);
  66. emit->max_num_labels = max_num_labels;
  67. emit->label_lookup = m_new(qstr, max_num_labels);
  68. return emit;
  69. }
  70. void emit_inline_thumb_free(emit_inline_asm_t *emit) {
  71. m_del(qstr, emit->label_lookup, emit->max_num_labels);
  72. mp_asm_base_deinit(&emit->as.base, false);
  73. m_del_obj(emit_inline_asm_t, emit);
  74. }
  75. STATIC void emit_inline_thumb_start_pass(emit_inline_asm_t *emit, pass_kind_t pass, mp_obj_t *error_slot) {
  76. emit->pass = pass;
  77. emit->error_slot = error_slot;
  78. if (emit->pass == MP_PASS_CODE_SIZE) {
  79. memset(emit->label_lookup, 0, emit->max_num_labels * sizeof(qstr));
  80. }
  81. mp_asm_base_start_pass(&emit->as.base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
  82. asm_thumb_entry(&emit->as, 0);
  83. }
// Finish a compiler pass: emit the function exit sequence and finalise the
// assembler for this pass.  type_sig is not used by this emitter.
STATIC void emit_inline_thumb_end_pass(emit_inline_asm_t *emit, mp_uint_t type_sig) {
    asm_thumb_exit(&emit->as);
    asm_thumb_end_pass(&emit->as);
}
  88. STATIC mp_uint_t emit_inline_thumb_count_params(emit_inline_asm_t *emit, mp_uint_t n_params, mp_parse_node_t *pn_params) {
  89. if (n_params > 4) {
  90. emit_inline_thumb_error_msg(emit, MP_ERROR_TEXT("can only have up to 4 parameters to Thumb assembly"));
  91. return 0;
  92. }
  93. for (mp_uint_t i = 0; i < n_params; i++) {
  94. if (!MP_PARSE_NODE_IS_ID(pn_params[i])) {
  95. emit_inline_thumb_error_msg(emit, MP_ERROR_TEXT("parameters must be registers in sequence r0 to r3"));
  96. return 0;
  97. }
  98. const char *p = qstr_str(MP_PARSE_NODE_LEAF_ARG(pn_params[i]));
  99. if (!(strlen(p) == 2 && p[0] == 'r' && (mp_uint_t)p[1] == '0' + i)) {
  100. emit_inline_thumb_error_msg(emit, MP_ERROR_TEXT("parameters must be registers in sequence r0 to r3"));
  101. return 0;
  102. }
  103. }
  104. return n_params;
  105. }
  106. STATIC bool emit_inline_thumb_label(emit_inline_asm_t *emit, mp_uint_t label_num, qstr label_id) {
  107. assert(label_num < emit->max_num_labels);
  108. if (emit->pass == MP_PASS_CODE_SIZE) {
  109. // check for duplicate label on first pass
  110. for (uint i = 0; i < emit->max_num_labels; i++) {
  111. if (emit->label_lookup[i] == label_id) {
  112. return false;
  113. }
  114. }
  115. }
  116. emit->label_lookup[label_num] = label_id;
  117. mp_asm_base_label_assign(&emit->as.base, label_num);
  118. return true;
  119. }
// Mapping of core-register names to register numbers.  The name field is a
// fixed 3-byte array: 2-character names carry an explicit NUL pad, while
// 3-character names ("r10".."r15") have no terminator — get_arg_reg() compares
// all 3 bytes directly rather than using strcmp.
typedef struct _reg_name_t { byte reg;
    byte name[3];
} reg_name_t;
STATIC const reg_name_t reg_name_table[] = {
    {0, "r0\0"},
    {1, "r1\0"},
    {2, "r2\0"},
    {3, "r3\0"},
    {4, "r4\0"},
    {5, "r5\0"},
    {6, "r6\0"},
    {7, "r7\0"},
    {8, "r8\0"},
    {9, "r9\0"},
    {10, "r10"},
    {11, "r11"},
    {12, "r12"},
    {13, "r13"},
    {14, "r14"},
    {15, "r15"},
    // conventional aliases for the high registers
    {10, "sl\0"},
    {11, "fp\0"},
    {13, "sp\0"},
    {14, "lr\0"},
    {15, "pc\0"},
};
// Special (system) registers accessible via the mrs instruction.  The reg
// value is OR'd directly into the low byte of the 32-bit MRS encoding by
// emit_inline_thumb_op().
#define MAX_SPECIAL_REGISTER_NAME_LENGTH 7
typedef struct _special_reg_name_t { byte reg;
    char name[MAX_SPECIAL_REGISTER_NAME_LENGTH + 1];
} special_reg_name_t;
STATIC const special_reg_name_t special_reg_name_table[] = {
    {5, "IPSR"},
    {17, "BASEPRI"},
};
  154. // return empty string in case of error, so we can attempt to parse the string
  155. // without a special check if it was in fact a string
  156. STATIC const char *get_arg_str(mp_parse_node_t pn) {
  157. if (MP_PARSE_NODE_IS_ID(pn)) {
  158. qstr qst = MP_PARSE_NODE_LEAF_ARG(pn);
  159. return qstr_str(qst);
  160. } else {
  161. return "";
  162. }
  163. }
// Parse pn as a core register name (r0..r15 or an alias sl/fp/sp/lr/pc) and
// return its register number.  Records a SyntaxError and returns 0 if the
// name is unknown or the register number exceeds max_reg.
STATIC mp_uint_t get_arg_reg(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn, mp_uint_t max_reg) {
    const char *reg_str = get_arg_str(pn);
    for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(reg_name_table); i++) {
        const reg_name_t *r = &reg_name_table[i];
        // compare all 3 bytes of the NUL-padded name field, and require
        // reg_str to end by its 4th byte so e.g. "r10x" does not match "r10"
        if (reg_str[0] == r->name[0]
            && reg_str[1] == r->name[1]
            && reg_str[2] == r->name[2]
            && (reg_str[2] == '\0' || reg_str[3] == '\0')) {
            if (r->reg > max_reg) {
                emit_inline_thumb_error_exc(emit,
                    mp_obj_new_exception_msg_varg(&mp_type_SyntaxError,
                        MP_ERROR_TEXT("'%s' expects at most r%d"), op, max_reg));
                return 0;
            } else {
                return r->reg;
            }
        }
    }
    emit_inline_thumb_error_exc(emit,
        mp_obj_new_exception_msg_varg(&mp_type_SyntaxError,
            MP_ERROR_TEXT("'%s' expects a register"), op));
    return 0;
}
  187. STATIC mp_uint_t get_arg_special_reg(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn) {
  188. const char *reg_str = get_arg_str(pn);
  189. for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(special_reg_name_table); i++) {
  190. const special_reg_name_t *r = &special_reg_name_table[i];
  191. if (strcmp(r->name, reg_str) == 0) {
  192. return r->reg;
  193. }
  194. }
  195. emit_inline_thumb_error_exc(emit,
  196. mp_obj_new_exception_msg_varg(&mp_type_SyntaxError,
  197. MP_ERROR_TEXT("'%s' expects a special register"), op));
  198. return 0;
  199. }
#if MICROPY_EMIT_INLINE_THUMB_FLOAT
// Parse pn as a single-precision VFP register name "s0".."s31" and return its
// number.  Records a SyntaxError and returns 0 on a malformed name or a
// register number above 31.
STATIC mp_uint_t get_arg_vfpreg(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn) {
    const char *reg_str = get_arg_str(pn);
    if (reg_str[0] == 's' && reg_str[1] != '\0') {
        // accumulate the decimal register number after the leading 's'
        mp_uint_t regno = 0;
        for (++reg_str; *reg_str; ++reg_str) {
            mp_uint_t v = *reg_str;
            if (!('0' <= v && v <= '9')) {
                goto malformed;
            }
            regno = 10 * regno + v - '0';
        }
        if (regno > 31) {
            // NOTE(review): the message says "r%d" even though these are
            // s-registers — possibly to reuse the same compressed error
            // string as get_arg_reg; confirm before changing.
            emit_inline_thumb_error_exc(emit,
                mp_obj_new_exception_msg_varg(&mp_type_SyntaxError,
                    MP_ERROR_TEXT("'%s' expects at most r%d"), op, 31));
            return 0;
        } else {
            return regno;
        }
    }
malformed:
    emit_inline_thumb_error_exc(emit,
        mp_obj_new_exception_msg_varg(&mp_type_SyntaxError,
            MP_ERROR_TEXT("'%s' expects an FPU register"), op));
    return 0;
}
#endif
// Parse pn as a register list and return it as a 16-bit mask (bit i set means
// register ri is included).  Records a SyntaxError and returns 0 on failure.
STATIC mp_uint_t get_arg_reglist(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn) {
    // a register list looks like {r0, r1, r2} and is parsed as a Python set
    if (!MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_atom_brace)) {
        goto bad_arg;
    }
    mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn;
    assert(MP_PARSE_NODE_STRUCT_NUM_NODES(pns) == 1); // should always be
    pn = pns->nodes[0];
    mp_uint_t reglist = 0;
    if (MP_PARSE_NODE_IS_ID(pn)) {
        // set with one element
        reglist |= 1 << get_arg_reg(emit, op, pn, 15);
    } else if (MP_PARSE_NODE_IS_STRUCT(pn)) {
        pns = (mp_parse_node_struct_t *)pn;
        if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_dictorsetmaker) {
            assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])); // should succeed
            mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pns->nodes[1];
            if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_dictorsetmaker_list) {
                // set with multiple elements
                // get first element of set (we rely on get_arg_reg to catch syntax errors)
                reglist |= 1 << get_arg_reg(emit, op, pns->nodes[0], 15);
                // get tail elements (2nd, 3rd, ...)
                mp_parse_node_t *nodes;
                int n = mp_parse_node_extract_list(&pns1->nodes[0], PN_dictorsetmaker_list2, &nodes);
                // process rest of elements
                for (int i = 0; i < n; i++) {
                    reglist |= 1 << get_arg_reg(emit, op, nodes[i], 15);
                }
            } else {
                goto bad_arg;
            }
        } else {
            goto bad_arg;
        }
    } else {
        goto bad_arg;
    }
    return reglist;
bad_arg:
    emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("'%s' expects {r0, r1, ...}"), op));
    return 0;
}
// Parse pn as an integer literal and return its (truncated) value.  Records a
// SyntaxError and returns 0 if pn is not an integer or if any bit outside
// fit_mask is set — callers use fit_mask to enforce the immediate field width.
STATIC uint32_t get_arg_i(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn, uint32_t fit_mask) {
    mp_obj_t o;
    if (!mp_parse_node_get_int_maybe(pn, &o)) {
        emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("'%s' expects an integer"), op));
        return 0;
    }
    uint32_t i = mp_obj_get_int_truncated(o);
    if ((i & (~fit_mask)) != 0) {
        emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("'%s' integer 0x%x doesn't fit in mask 0x%x"), op, i, fit_mask));
        return 0;
    }
    return i;
}
// Parse pn as an address expression of the form [a, b] (a Python list with
// exactly two elements) and return its parts in *pn_base and *pn_offset.
// Records a SyntaxError and returns false if pn has a different shape.
STATIC bool get_arg_addr(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn, mp_parse_node_t *pn_base, mp_parse_node_t *pn_offset) {
    if (!MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_atom_bracket)) {
        goto bad_arg;
    }
    mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn;
    if (!MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)) {
        goto bad_arg;
    }
    pns = (mp_parse_node_struct_t *)pns->nodes[0];
    if (MP_PARSE_NODE_STRUCT_NUM_NODES(pns) != 2) {
        goto bad_arg;
    }
    *pn_base = pns->nodes[0];
    *pn_offset = pns->nodes[1];
    return true;
bad_arg:
    emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("'%s' expects an address of the form [a, b]"), op));
    return false;
}
// Parse pn as a label identifier and return its label number.  Returns 0 on
// error (note 0 is also a valid label number; errors are reported via the
// error slot).  An undefined label is only an error on the final (emit) pass,
// since forward references are resolved on earlier passes.
STATIC int get_arg_label(emit_inline_asm_t *emit, const char *op, mp_parse_node_t pn) {
    if (!MP_PARSE_NODE_IS_ID(pn)) {
        emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("'%s' expects a label"), op));
        return 0;
    }
    qstr label_qstr = MP_PARSE_NODE_LEAF_ARG(pn);
    for (uint i = 0; i < emit->max_num_labels; i++) {
        if (emit->label_lookup[i] == label_qstr) {
            return i;
        }
    }
    // only need to have the labels on the last pass
    if (emit->pass == MP_PASS_EMIT) {
        emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("label '%q' not defined"), label_qstr));
    }
    return 0;
}
// Condition-code suffixes used by conditional branches (b<cc>) and the IT
// instruction.  The 2-byte name field has no NUL terminator; lookups compare
// the two characters directly.
typedef struct _cc_name_t { byte cc;
    byte name[2];
} cc_name_t;
STATIC const cc_name_t cc_name_table[] = {
    { ASM_THUMB_CC_EQ, "eq" },
    { ASM_THUMB_CC_NE, "ne" },
    { ASM_THUMB_CC_CS, "cs" },
    { ASM_THUMB_CC_CC, "cc" },
    { ASM_THUMB_CC_MI, "mi" },
    { ASM_THUMB_CC_PL, "pl" },
    { ASM_THUMB_CC_VS, "vs" },
    { ASM_THUMB_CC_VC, "vc" },
    { ASM_THUMB_CC_HI, "hi" },
    { ASM_THUMB_CC_LS, "ls" },
    { ASM_THUMB_CC_GE, "ge" },
    { ASM_THUMB_CC_LT, "lt" },
    { ASM_THUMB_CC_GT, "gt" },
    { ASM_THUMB_CC_LE, "le" },
};
// Two-operand ALU instructions (Thumb format 4).  The opcode is compressed to
// one byte via X(); emit_inline_thumb_op() reconstructs it as 0x4000 | (op << 4).
typedef struct _format_4_op_t { byte op;
    char name[3];
} format_4_op_t;
#define X(x) (((x) >> 4) & 0xff) // only need 1 byte to distinguish these ops
STATIC const format_4_op_t format_4_op_table[] = {
    { X(ASM_THUMB_FORMAT_4_EOR), "eor" },
    { X(ASM_THUMB_FORMAT_4_LSL), "lsl" },
    { X(ASM_THUMB_FORMAT_4_LSR), "lsr" },
    { X(ASM_THUMB_FORMAT_4_ASR), "asr" },
    { X(ASM_THUMB_FORMAT_4_ADC), "adc" },
    { X(ASM_THUMB_FORMAT_4_SBC), "sbc" },
    { X(ASM_THUMB_FORMAT_4_ROR), "ror" },
    { X(ASM_THUMB_FORMAT_4_TST), "tst" },
    { X(ASM_THUMB_FORMAT_4_NEG), "neg" },
    { X(ASM_THUMB_FORMAT_4_CMP), "cmp" },
    { X(ASM_THUMB_FORMAT_4_CMN), "cmn" },
    { X(ASM_THUMB_FORMAT_4_ORR), "orr" },
    { X(ASM_THUMB_FORMAT_4_MUL), "mul" },
    { X(ASM_THUMB_FORMAT_4_BIC), "bic" },
    { X(ASM_THUMB_FORMAT_4_MVN), "mvn" },
};
#undef X
// Load/store instructions (Thumb formats 9 and 10), matched by qstr identity
// rather than string comparison.
// name is actually a qstr, which should fit in 16 bits
typedef struct _format_9_10_op_t { uint16_t op;
    uint16_t name;
} format_9_10_op_t;
#define X(x) (x)
STATIC const format_9_10_op_t format_9_10_op_table[] = {
    { X(ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_WORD_TRANSFER), MP_QSTR_ldr },
    { X(ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER), MP_QSTR_ldrb },
    { X(ASM_THUMB_FORMAT_10_LDRH), MP_QSTR_ldrh },
    { X(ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_WORD_TRANSFER), MP_QSTR_str },
    { X(ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER), MP_QSTR_strb },
    { X(ASM_THUMB_FORMAT_10_STRH), MP_QSTR_strh },
};
#undef X
#if MICROPY_EMIT_INLINE_THUMB_FLOAT
// Three-operand VFP arithmetic ops (vadd/vsub/vmul/vdiv); names are matched
// against op_str + 1 (i.e. with the leading 'v' stripped).
// actual opcodes are: 0xee00 | op.hi_nibble, 0x0a00 | op.lo_nibble
typedef struct _format_vfp_op_t { byte op;
    char name[3];
} format_vfp_op_t;
STATIC const format_vfp_op_t format_vfp_op_table[] = {
    { 0x30, "add" },
    { 0x34, "sub" },
    { 0x20, "mul" },
    { 0x80, "div" },
};
#endif
// shorthand alias for whether we allow ARMv7-M instructions
#define ARMV7M MICROPY_EMIT_INLINE_THUMB_ARMV7M

// Assemble one inline-asm instruction: decode the op name (a qstr) together
// with its argument count, and emit the corresponding Thumb machine code into
// emit->as.  All errors are reported as SyntaxError via the error slot.
// Shared encoding tails are reached by goto from the op-specific setup code.
STATIC void emit_inline_thumb_op(emit_inline_asm_t *emit, qstr op, mp_uint_t n_args, mp_parse_node_t *pn_args) {
    // TODO perhaps make two tables:
    // one_args =
    // "b", LAB, asm_thumb_b_n,
    // "bgt", LAB, asm_thumb_bgt_n,
    // two_args =
    // "movs", RLO, I8, asm_thumb_movs_reg_i8
    // "movw", REG, REG, asm_thumb_movw_reg_i16
    // three_args =
    // "subs", RLO, RLO, I3, asm_thumb_subs_reg_reg_i3
    size_t op_len;
    const char *op_str = (const char *)qstr_data(op, &op_len);

    #if MICROPY_EMIT_INLINE_THUMB_FLOAT
    if (op_str[0] == 'v') {
        // floating point operations
        if (n_args == 2) {
            mp_uint_t op_code = 0x0ac0, op_code_hi;
            if (op == MP_QSTR_vcmp) {
                op_code_hi = 0xeeb4;
            op_vfp_twoargs:;
                mp_uint_t vd = get_arg_vfpreg(emit, op_str, pn_args[0]);
                mp_uint_t vm = get_arg_vfpreg(emit, op_str, pn_args[1]);
                asm_thumb_op32(&emit->as,
                    op_code_hi | ((vd & 1) << 6),
                    op_code | ((vd & 0x1e) << 11) | ((vm & 1) << 5) | (vm & 0x1e) >> 1);
            } else if (op == MP_QSTR_vsqrt) {
                op_code_hi = 0xeeb1;
                goto op_vfp_twoargs;
            } else if (op == MP_QSTR_vneg) {
                op_code_hi = 0xeeb1;
                op_code = 0x0a40;
                goto op_vfp_twoargs;
            } else if (op == MP_QSTR_vcvt_f32_s32) {
                op_code_hi = 0xeeb8; // int to float
                goto op_vfp_twoargs;
            } else if (op == MP_QSTR_vcvt_s32_f32) {
                op_code_hi = 0xeebd; // float to int
                goto op_vfp_twoargs;
            } else if (op == MP_QSTR_vmrs) {
                mp_uint_t reg_dest;
                const char *reg_str0 = get_arg_str(pn_args[0]);
                if (strcmp(reg_str0, "APSR_nzcv") == 0) {
                    reg_dest = 15;
                } else {
                    reg_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
                }
                const char *reg_str1 = get_arg_str(pn_args[1]);
                if (strcmp(reg_str1, "FPSCR") == 0) {
                    // FP status to ARM reg
                    asm_thumb_op32(&emit->as, 0xeef1, 0x0a10 | (reg_dest << 12));
                } else {
                    goto unknown_op;
                }
            } else if (op == MP_QSTR_vmov) {
                op_code_hi = 0xee00;
                mp_uint_t r_arm, vm;
                const char *reg_str = get_arg_str(pn_args[0]);
                if (reg_str[0] == 'r') {
                    // first operand is an ARM core register
                    r_arm = get_arg_reg(emit, op_str, pn_args[0], 15);
                    vm = get_arg_vfpreg(emit, op_str, pn_args[1]);
                    op_code_hi |= 0x10;
                } else {
                    // first operand is a VFP register
                    vm = get_arg_vfpreg(emit, op_str, pn_args[0]);
                    r_arm = get_arg_reg(emit, op_str, pn_args[1], 15);
                }
                asm_thumb_op32(&emit->as,
                    op_code_hi | ((vm & 0x1e) >> 1),
                    0x0a10 | (r_arm << 12) | ((vm & 1) << 7));
            } else if (op == MP_QSTR_vldr) {
                op_code_hi = 0xed90;
            op_vldr_vstr:;
                mp_uint_t vd = get_arg_vfpreg(emit, op_str, pn_args[0]);
                mp_parse_node_t pn_base, pn_offset;
                if (get_arg_addr(emit, op_str, pn_args[1], &pn_base, &pn_offset)) {
                    mp_uint_t rlo_base = get_arg_reg(emit, op_str, pn_base, 7);
                    mp_uint_t i8;
                    // byte offset is encoded in words, hence the >> 2
                    i8 = get_arg_i(emit, op_str, pn_offset, 0x3fc) >> 2;
                    asm_thumb_op32(&emit->as,
                        op_code_hi | rlo_base | ((vd & 1) << 6),
                        0x0a00 | ((vd & 0x1e) << 11) | i8);
                }
            } else if (op == MP_QSTR_vstr) {
                op_code_hi = 0xed80;
                goto op_vldr_vstr;
            } else {
                goto unknown_op;
            }
        } else if (n_args == 3) {
            // search table for arith ops
            for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(format_vfp_op_table); i++) {
                if (strncmp(op_str + 1, format_vfp_op_table[i].name, 3) == 0 && op_str[4] == '\0') {
                    mp_uint_t op_code_hi = 0xee00 | (format_vfp_op_table[i].op & 0xf0);
                    mp_uint_t op_code = 0x0a00 | ((format_vfp_op_table[i].op & 0x0f) << 4);
                    mp_uint_t vd = get_arg_vfpreg(emit, op_str, pn_args[0]);
                    mp_uint_t vn = get_arg_vfpreg(emit, op_str, pn_args[1]);
                    mp_uint_t vm = get_arg_vfpreg(emit, op_str, pn_args[2]);
                    asm_thumb_op32(&emit->as,
                        op_code_hi | ((vd & 1) << 6) | (vn >> 1),
                        op_code | (vm >> 1) | ((vm & 1) << 5) | ((vd & 0x1e) << 11) | ((vn & 1) << 7));
                    return;
                }
            }
            goto unknown_op;
        } else {
            goto unknown_op;
        }
        return;
    }
    #endif

    if (n_args == 0) {
        if (op == MP_QSTR_nop) {
            asm_thumb_op16(&emit->as, ASM_THUMB_OP_NOP);
        } else if (op == MP_QSTR_wfi) {
            asm_thumb_op16(&emit->as, ASM_THUMB_OP_WFI);
        } else {
            goto unknown_op;
        }
    } else if (n_args == 1) {
        if (op == MP_QSTR_b) {
            int label_num = get_arg_label(emit, op_str, pn_args[0]);
            if (!asm_thumb_b_n_label(&emit->as, label_num)) {
                goto branch_not_in_range;
            }
        } else if (op == MP_QSTR_bl) {
            int label_num = get_arg_label(emit, op_str, pn_args[0]);
            if (!asm_thumb_bl_label(&emit->as, label_num)) {
                goto branch_not_in_range;
            }
        } else if (op == MP_QSTR_bx) {
            mp_uint_t r = get_arg_reg(emit, op_str, pn_args[0], 15);
            asm_thumb_op16(&emit->as, 0x4700 | (r << 3));
        } else if (op_str[0] == 'b' && (op_len == 3
                                        || (op_len == 5 && op_str[3] == '_'
                                            && (op_str[4] == 'n' || (ARMV7M && op_str[4] == 'w'))))) {
            // conditional branch: b<cc>, b<cc>_n (narrow) or b<cc>_w (wide)
            mp_uint_t cc = -1;
            for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(cc_name_table); i++) {
                if (op_str[1] == cc_name_table[i].name[0] && op_str[2] == cc_name_table[i].name[1]) {
                    cc = cc_name_table[i].cc;
                }
            }
            if (cc == (mp_uint_t)-1) {
                goto unknown_op;
            }
            int label_num = get_arg_label(emit, op_str, pn_args[0]);
            if (!asm_thumb_bcc_nw_label(&emit->as, cc, label_num, op_len == 5 && op_str[4] == 'w')) {
                goto branch_not_in_range;
            }
        } else if (ARMV7M && op_str[0] == 'i' && op_str[1] == 't') {
            // IT block: op name is "it" followed by up to 3 't'/'e' letters,
            // with the base condition given as the single argument
            const char *arg_str = get_arg_str(pn_args[0]);
            mp_uint_t cc = -1;
            for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(cc_name_table); i++) {
                if (arg_str[0] == cc_name_table[i].name[0]
                    && arg_str[1] == cc_name_table[i].name[1]
                    && arg_str[2] == '\0') {
                    cc = cc_name_table[i].cc;
                    break;
                }
            }
            if (cc == (mp_uint_t)-1) {
                goto unknown_op;
            }
            // find the end of the t/e suffix letters
            const char *os = op_str + 2;
            while (*os != '\0') {
                os++;
            }
            if (os > op_str + 5) {
                goto unknown_op;
            }
            // build the 4-bit IT mask from the suffix letters, last first
            mp_uint_t it_mask = 8;
            while (--os >= op_str + 2) {
                it_mask >>= 1;
                if (*os == 't') {
                    it_mask |= (cc & 1) << 3;
                } else if (*os == 'e') {
                    it_mask |= ((~cc) & 1) << 3;
                } else {
                    goto unknown_op;
                }
            }
            asm_thumb_it_cc(&emit->as, cc, it_mask);
        } else if (op == MP_QSTR_cpsid) {
            // TODO check pn_args[0] == i
            asm_thumb_op16(&emit->as, ASM_THUMB_OP_CPSID_I);
        } else if (op == MP_QSTR_cpsie) {
            // TODO check pn_args[0] == i
            asm_thumb_op16(&emit->as, ASM_THUMB_OP_CPSIE_I);
        } else if (op == MP_QSTR_push) {
            mp_uint_t reglist = get_arg_reglist(emit, op_str, pn_args[0]);
            if ((reglist & 0xff00) == 0) {
                asm_thumb_op16(&emit->as, 0xb400 | reglist);
            } else {
                // high registers need the 32-bit (ARMv7-M) encoding
                if (!ARMV7M) {
                    goto unknown_op;
                }
                asm_thumb_op32(&emit->as, 0xe92d, reglist);
            }
        } else if (op == MP_QSTR_pop) {
            mp_uint_t reglist = get_arg_reglist(emit, op_str, pn_args[0]);
            if ((reglist & 0xff00) == 0) {
                asm_thumb_op16(&emit->as, 0xbc00 | reglist);
            } else {
                // high registers need the 32-bit (ARMv7-M) encoding
                if (!ARMV7M) {
                    goto unknown_op;
                }
                asm_thumb_op32(&emit->as, 0xe8bd, reglist);
            }
        } else {
            goto unknown_op;
        }
    } else if (n_args == 2) {
        if (MP_PARSE_NODE_IS_ID(pn_args[1])) {
            // second arg is a register (or should be)
            mp_uint_t op_code, op_code_hi;
            if (op == MP_QSTR_mov) {
                mp_uint_t reg_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
                mp_uint_t reg_src = get_arg_reg(emit, op_str, pn_args[1], 15);
                asm_thumb_mov_reg_reg(&emit->as, reg_dest, reg_src);
            } else if (ARMV7M && op == MP_QSTR_clz) {
                op_code_hi = 0xfab0;
                op_code = 0xf080;
                mp_uint_t rd, rm;
            op_clz_rbit:
                rd = get_arg_reg(emit, op_str, pn_args[0], 15);
                rm = get_arg_reg(emit, op_str, pn_args[1], 15);
                asm_thumb_op32(&emit->as, op_code_hi | rm, op_code | (rd << 8) | rm);
            } else if (ARMV7M && op == MP_QSTR_rbit) {
                op_code_hi = 0xfa90;
                op_code = 0xf0a0;
                goto op_clz_rbit;
            } else if (ARMV7M && op == MP_QSTR_mrs) {
                mp_uint_t reg_dest = get_arg_reg(emit, op_str, pn_args[0], 12);
                mp_uint_t reg_src = get_arg_special_reg(emit, op_str, pn_args[1]);
                asm_thumb_op32(&emit->as, 0xf3ef, 0x8000 | (reg_dest << 8) | reg_src);
            } else {
                if (op == MP_QSTR_and_) {
                    // named "and_" at the Python level since "and" is a keyword
                    op_code = ASM_THUMB_FORMAT_4_AND;
                    mp_uint_t reg_dest, reg_src;
                op_format_4:
                    reg_dest = get_arg_reg(emit, op_str, pn_args[0], 7);
                    reg_src = get_arg_reg(emit, op_str, pn_args[1], 7);
                    asm_thumb_format_4(&emit->as, op_code, reg_dest, reg_src);
                    return;
                }
                // search table for ALU ops
                for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(format_4_op_table); i++) {
                    if (strncmp(op_str, format_4_op_table[i].name, 3) == 0 && op_str[3] == '\0') {
                        op_code = 0x4000 | (format_4_op_table[i].op << 4);
                        goto op_format_4;
                    }
                }
                goto unknown_op;
            }
        } else {
            // second arg is not a register
            mp_uint_t op_code;
            if (op == MP_QSTR_mov) {
                op_code = ASM_THUMB_FORMAT_3_MOV;
                mp_uint_t rlo_dest, i8_src;
            op_format_3:
                rlo_dest = get_arg_reg(emit, op_str, pn_args[0], 7);
                i8_src = get_arg_i(emit, op_str, pn_args[1], 0xff);
                asm_thumb_format_3(&emit->as, op_code, rlo_dest, i8_src);
            } else if (op == MP_QSTR_cmp) {
                op_code = ASM_THUMB_FORMAT_3_CMP;
                goto op_format_3;
            } else if (op == MP_QSTR_add) {
                op_code = ASM_THUMB_FORMAT_3_ADD;
                goto op_format_3;
            } else if (op == MP_QSTR_sub) {
                op_code = ASM_THUMB_FORMAT_3_SUB;
                goto op_format_3;
            } else if (ARMV7M && op == MP_QSTR_movw) {
                op_code = ASM_THUMB_OP_MOVW;
                mp_uint_t reg_dest;
            op_movw_movt:
                reg_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
                int i_src = get_arg_i(emit, op_str, pn_args[1], 0xffff);
                asm_thumb_mov_reg_i16(&emit->as, op_code, reg_dest, i_src);
            } else if (ARMV7M && op == MP_QSTR_movt) {
                op_code = ASM_THUMB_OP_MOVT;
                goto op_movw_movt;
            } else if (ARMV7M && op == MP_QSTR_movwt) {
                // this is a convenience instruction
                mp_uint_t reg_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
                uint32_t i_src = get_arg_i(emit, op_str, pn_args[1], 0xffffffff);
                asm_thumb_mov_reg_i16(&emit->as, ASM_THUMB_OP_MOVW, reg_dest, i_src & 0xffff);
                asm_thumb_mov_reg_i16(&emit->as, ASM_THUMB_OP_MOVT, reg_dest, (i_src >> 16) & 0xffff);
            } else if (ARMV7M && op == MP_QSTR_ldrex) {
                mp_uint_t r_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
                mp_parse_node_t pn_base, pn_offset;
                if (get_arg_addr(emit, op_str, pn_args[1], &pn_base, &pn_offset)) {
                    mp_uint_t r_base = get_arg_reg(emit, op_str, pn_base, 15);
                    mp_uint_t i8 = get_arg_i(emit, op_str, pn_offset, 0xff) >> 2;
                    asm_thumb_op32(&emit->as, 0xe850 | r_base, 0x0f00 | (r_dest << 12) | i8);
                }
            } else {
                // search table for ldr/str instructions
                for (mp_uint_t i = 0; i < MP_ARRAY_SIZE(format_9_10_op_table); i++) {
                    if (op == format_9_10_op_table[i].name) {
                        op_code = format_9_10_op_table[i].op;
                        mp_parse_node_t pn_base, pn_offset;
                        mp_uint_t rlo_dest = get_arg_reg(emit, op_str, pn_args[0], 7);
                        if (get_arg_addr(emit, op_str, pn_args[1], &pn_base, &pn_offset)) {
                            mp_uint_t rlo_base = get_arg_reg(emit, op_str, pn_base, 7);
                            mp_uint_t i5;
                            // the immediate offset is scaled by the transfer size
                            if (op_code & ASM_THUMB_FORMAT_9_BYTE_TRANSFER) {
                                i5 = get_arg_i(emit, op_str, pn_offset, 0x1f);
                            } else if (op_code & ASM_THUMB_FORMAT_10_STRH) { // also catches LDRH
                                i5 = get_arg_i(emit, op_str, pn_offset, 0x3e) >> 1;
                            } else {
                                i5 = get_arg_i(emit, op_str, pn_offset, 0x7c) >> 2;
                            }
                            asm_thumb_format_9_10(&emit->as, op_code, rlo_dest, rlo_base, i5);
                            return;
                        }
                        break;
                    }
                }
                goto unknown_op;
            }
        }
    } else if (n_args == 3) {
        mp_uint_t op_code;
        if (op == MP_QSTR_lsl) {
            op_code = ASM_THUMB_FORMAT_1_LSL;
            mp_uint_t rlo_dest, rlo_src, i5;
        op_format_1:
            rlo_dest = get_arg_reg(emit, op_str, pn_args[0], 7);
            rlo_src = get_arg_reg(emit, op_str, pn_args[1], 7);
            i5 = get_arg_i(emit, op_str, pn_args[2], 0x1f);
            asm_thumb_format_1(&emit->as, op_code, rlo_dest, rlo_src, i5);
        } else if (op == MP_QSTR_lsr) {
            op_code = ASM_THUMB_FORMAT_1_LSR;
            goto op_format_1;
        } else if (op == MP_QSTR_asr) {
            op_code = ASM_THUMB_FORMAT_1_ASR;
            goto op_format_1;
        } else if (op == MP_QSTR_add) {
            op_code = ASM_THUMB_FORMAT_2_ADD;
            mp_uint_t rlo_dest, rlo_src;
        op_format_2:
            rlo_dest = get_arg_reg(emit, op_str, pn_args[0], 7);
            rlo_src = get_arg_reg(emit, op_str, pn_args[1], 7);
            int src_b;
            if (MP_PARSE_NODE_IS_ID(pn_args[2])) {
                // third operand is a register
                op_code |= ASM_THUMB_FORMAT_2_REG_OPERAND;
                src_b = get_arg_reg(emit, op_str, pn_args[2], 7);
            } else {
                // third operand is a 3-bit immediate
                op_code |= ASM_THUMB_FORMAT_2_IMM_OPERAND;
                src_b = get_arg_i(emit, op_str, pn_args[2], 0x7);
            }
            asm_thumb_format_2(&emit->as, op_code, rlo_dest, rlo_src, src_b);
        } else if (ARMV7M && op == MP_QSTR_sdiv) {
            op_code = 0xfb90; // sdiv high part
            mp_uint_t rd, rn, rm;
        op_sdiv_udiv:
            rd = get_arg_reg(emit, op_str, pn_args[0], 15);
            rn = get_arg_reg(emit, op_str, pn_args[1], 15);
            rm = get_arg_reg(emit, op_str, pn_args[2], 15);
            asm_thumb_op32(&emit->as, op_code | rn, 0xf0f0 | (rd << 8) | rm);
        } else if (ARMV7M && op == MP_QSTR_udiv) {
            op_code = 0xfbb0; // udiv high part
            goto op_sdiv_udiv;
        } else if (op == MP_QSTR_sub) {
            op_code = ASM_THUMB_FORMAT_2_SUB;
            goto op_format_2;
        } else if (ARMV7M && op == MP_QSTR_strex) {
            mp_uint_t r_dest = get_arg_reg(emit, op_str, pn_args[0], 15);
            mp_uint_t r_src = get_arg_reg(emit, op_str, pn_args[1], 15);
            mp_parse_node_t pn_base, pn_offset;
            if (get_arg_addr(emit, op_str, pn_args[2], &pn_base, &pn_offset)) {
                mp_uint_t r_base = get_arg_reg(emit, op_str, pn_base, 15);
                mp_uint_t i8 = get_arg_i(emit, op_str, pn_offset, 0xff) >> 2;
                asm_thumb_op32(&emit->as, 0xe840 | r_base, (r_src << 12) | (r_dest << 8) | i8);
            }
        } else {
            goto unknown_op;
        }
    } else {
        goto unknown_op;
    }

    return;

unknown_op:
    emit_inline_thumb_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("unsupported Thumb instruction '%s' with %d arguments"), op_str, n_args));
    return;

branch_not_in_range:
    emit_inline_thumb_error_msg(emit, MP_ERROR_TEXT("branch not in range"));
    return;
}
// Method table binding this Thumb emitter to the compiler's generic
// inline-assembler interface.
const emit_inline_asm_method_table_t emit_inline_thumb_method_table = {
    #if MICROPY_DYNAMIC_COMPILER
    emit_inline_thumb_new,
    emit_inline_thumb_free,
    #endif
    emit_inline_thumb_start_pass,
    emit_inline_thumb_end_pass,
    emit_inline_thumb_count_params,
    emit_inline_thumb_label,
    emit_inline_thumb_op,
};
  788. #endif // MICROPY_EMIT_INLINE_THUMB