// objfun.c
  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013, 2014 Damien P. George
  7. * Copyright (c) 2014 Paul Sokolovsky
  8. *
  9. * Permission is hereby granted, free of charge, to any person obtaining a copy
  10. * of this software and associated documentation files (the "Software"), to deal
  11. * in the Software without restriction, including without limitation the rights
  12. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. * copies of the Software, and to permit persons to whom the Software is
  14. * furnished to do so, subject to the following conditions:
  15. *
  16. * The above copyright notice and this permission notice shall be included in
  17. * all copies or substantial portions of the Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  22. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  23. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  24. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  25. * THE SOFTWARE.
  26. */
  27. #include <string.h>
  28. #include <assert.h>
  29. #include "py/objtuple.h"
  30. #include "py/objfun.h"
  31. #include "py/runtime.h"
  32. #include "py/bc.h"
  33. #include "py/stackctrl.h"
  34. #if MICROPY_DEBUG_VERBOSE // print debugging info
  35. #define DEBUG_PRINT (1)
  36. #else // don't print debugging info
  37. #define DEBUG_PRINT (0)
  38. #define DEBUG_printf(...) (void)0
  39. #endif
  40. // Note: the "name" entry in mp_obj_type_t for a function type must be
  41. // MP_QSTR_function because it is used to determine if an object is of generic
  42. // function type.
  43. /******************************************************************************/
  44. /* builtin functions */
  45. STATIC mp_obj_t fun_builtin_0_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  46. (void)args;
  47. assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_0));
  48. mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
  49. mp_arg_check_num(n_args, n_kw, 0, 0, false);
  50. return self->fun._0();
  51. }
  52. const mp_obj_type_t mp_type_fun_builtin_0 = {
  53. { &mp_type_type },
  54. .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
  55. .name = MP_QSTR_function,
  56. .call = fun_builtin_0_call,
  57. .unary_op = mp_generic_unary_op,
  58. };
  59. STATIC mp_obj_t fun_builtin_1_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  60. assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_1));
  61. mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
  62. mp_arg_check_num(n_args, n_kw, 1, 1, false);
  63. return self->fun._1(args[0]);
  64. }
  65. const mp_obj_type_t mp_type_fun_builtin_1 = {
  66. { &mp_type_type },
  67. .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
  68. .name = MP_QSTR_function,
  69. .call = fun_builtin_1_call,
  70. .unary_op = mp_generic_unary_op,
  71. };
  72. STATIC mp_obj_t fun_builtin_2_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  73. assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_2));
  74. mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
  75. mp_arg_check_num(n_args, n_kw, 2, 2, false);
  76. return self->fun._2(args[0], args[1]);
  77. }
  78. const mp_obj_type_t mp_type_fun_builtin_2 = {
  79. { &mp_type_type },
  80. .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
  81. .name = MP_QSTR_function,
  82. .call = fun_builtin_2_call,
  83. .unary_op = mp_generic_unary_op,
  84. };
  85. STATIC mp_obj_t fun_builtin_3_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  86. assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_3));
  87. mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
  88. mp_arg_check_num(n_args, n_kw, 3, 3, false);
  89. return self->fun._3(args[0], args[1], args[2]);
  90. }
  91. const mp_obj_type_t mp_type_fun_builtin_3 = {
  92. { &mp_type_type },
  93. .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
  94. .name = MP_QSTR_function,
  95. .call = fun_builtin_3_call,
  96. .unary_op = mp_generic_unary_op,
  97. };
  98. STATIC mp_obj_t fun_builtin_var_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  99. assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_var));
  100. mp_obj_fun_builtin_var_t *self = MP_OBJ_TO_PTR(self_in);
  101. // check number of arguments
  102. mp_arg_check_num_sig(n_args, n_kw, self->sig);
  103. if (self->sig & 1) {
  104. // function allows keywords
  105. // we create a map directly from the given args array
  106. mp_map_t kw_args;
  107. mp_map_init_fixed_table(&kw_args, n_kw, args + n_args);
  108. return self->fun.kw(n_args, args, &kw_args);
  109. } else {
  110. // function takes a variable number of arguments, but no keywords
  111. return self->fun.var(n_args, args);
  112. }
  113. }
  114. const mp_obj_type_t mp_type_fun_builtin_var = {
  115. { &mp_type_type },
  116. .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
  117. .name = MP_QSTR_function,
  118. .call = fun_builtin_var_call,
  119. .unary_op = mp_generic_unary_op,
  120. };
  121. /******************************************************************************/
  122. /* byte code functions */
// Extract the function name (as a qstr) from the code-info section of a
// bytecode prelude.  code_info points at the start of the code-info block.
qstr mp_obj_code_get_name(const byte *code_info) {
    // Decode (and step past) the prelude size header; after this macro,
    // code_info points at the encoded function name.
    MP_BC_PRELUDE_SIZE_DECODE(code_info);
    #if MICROPY_PERSISTENT_CODE
    // Persistent code stores the qstr as a fixed 16-bit little-endian value.
    return code_info[0] | (code_info[1] << 8);
    #else
    // Otherwise the qstr is stored as a variable-length encoded uint.
    return mp_decode_uint_value(code_info);
    #endif
}
#if MICROPY_EMIT_NATIVE
// Forward declaration so mp_obj_fun_get_name can recognise native functions.
STATIC const mp_obj_type_t mp_type_fun_native;
#endif

// Return the name of a function object (bytecode or native).
qstr mp_obj_fun_get_name(mp_const_obj_t fun_in) {
    const mp_obj_fun_bc_t *fun = MP_OBJ_TO_PTR(fun_in);
    #if MICROPY_EMIT_NATIVE
    if (fun->base.type == &mp_type_fun_native || fun->base.type == &mp_type_native_gen_wrap) {
        // TODO native functions don't have name stored
        return MP_QSTR_;
    }
    #endif
    const byte *bc = fun->bytecode;
    // Skip the signature part of the prelude; bc is advanced to the
    // code-info section, from which the name can be read.
    MP_BC_PRELUDE_SIG_DECODE(bc);
    return mp_obj_code_get_name(bc);
}
  146. #if MICROPY_CPYTHON_COMPAT
  147. STATIC void fun_bc_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_t kind) {
  148. (void)kind;
  149. mp_obj_fun_bc_t *o = MP_OBJ_TO_PTR(o_in);
  150. mp_printf(print, "<function %q at 0x%p>", mp_obj_fun_get_name(o_in), o);
  151. }
  152. #endif
#if DEBUG_PRINT
// Debug helper: print the address of an argument array followed by each
// object pointer it contains.
STATIC void dump_args(const mp_obj_t *a, size_t sz) {
    DEBUG_printf("%p: ", a);
    for (size_t i = 0; i < sz; i++) {
        DEBUG_printf("%p ", a[i]);
    }
    DEBUG_printf("\n");
}
#else
// No-op in non-debug builds.
#define dump_args(...) (void)0
#endif
// With this macro you can tune the maximum number of function state bytes
// that will be allocated on the stack. Any function that needs more
// than this will try to use the heap, with fallback to stack allocation.
#define VM_MAX_STATE_ON_STACK (sizeof(mp_uint_t) * 11)

// Decode the prelude of `bytecode` to compute the number of state slots
// (written to n_state_out_var) and the total state size in bytes
// (written to state_size_out_var) needed to execute it.
#define DECODE_CODESTATE_SIZE(bytecode, n_state_out_var, state_size_out_var) \
    { \
        const uint8_t *ip = bytecode; \
        size_t n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args; \
        MP_BC_PRELUDE_SIG_DECODE_INTO(ip, n_state_out_var, n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args); \
        \
        /* state size in bytes */ \
        state_size_out_var = n_state_out_var * sizeof(mp_obj_t) \
            + n_exc_stack * sizeof(mp_exc_stack_t); \
    }

// Initialise a freshly-allocated mp_code_state_t: record the function and
// state size, set up arguments/locals via mp_setup_code_state, and remember
// the current globals so they can be restored after the call.
#define INIT_CODESTATE(code_state, _fun_bc, _n_state, n_args, n_kw, args) \
    code_state->fun_bc = _fun_bc; \
    code_state->ip = 0; \
    code_state->n_state = _n_state; \
    mp_setup_code_state(code_state, n_args, n_kw, args); \
    code_state->old_globals = mp_globals_get();
#if MICROPY_STACKLESS
// Allocate and initialise a code state for calling a bytecode function, for
// use by the stackless VM.  Returns NULL on allocation failure (when pystack
// is disabled) so the caller in vm.c can decide how to recover.
mp_code_state_t *mp_obj_fun_bc_prepare_codestate(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    MP_STACK_CHECK();
    mp_obj_fun_bc_t *self = MP_OBJ_TO_PTR(self_in);
    size_t n_state, state_size;
    DECODE_CODESTATE_SIZE(self->bytecode, n_state, state_size);
    mp_code_state_t *code_state;
    #if MICROPY_ENABLE_PYSTACK
    code_state = mp_pystack_alloc(sizeof(mp_code_state_t) + state_size);
    #else
    // If we use m_new_obj_var(), then on no memory, MemoryError will be
    // raised. But this is not correct exception for a function call,
    // RuntimeError should be raised instead. So, we use m_new_obj_var_maybe(),
    // return NULL, then vm.c takes the needed action (either raise
    // RuntimeError or fallback to stack allocation).
    code_state = m_new_obj_var_maybe(mp_code_state_t, byte, state_size);
    if (!code_state) {
        return NULL;
    }
    #endif
    INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args);
    // execute the byte code with the correct globals context
    mp_globals_set(self->globals);
    return code_state;
}
#endif
// Call a bytecode function: allocate its state (pystack, heap, or C stack),
// run the VM, then return the result or re-raise the exception it produced.
STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    MP_STACK_CHECK();

    DEBUG_printf("Input n_args: " UINT_FMT ", n_kw: " UINT_FMT "\n", n_args, n_kw);
    DEBUG_printf("Input pos args: ");
    dump_args(args, n_args);
    DEBUG_printf("Input kw args: ");
    // keyword args are stored as key/value pairs, hence 2 slots per kw
    dump_args(args + n_args, n_kw * 2);

    mp_obj_fun_bc_t *self = MP_OBJ_TO_PTR(self_in);

    size_t n_state, state_size;
    DECODE_CODESTATE_SIZE(self->bytecode, n_state, state_size);

    // allocate state for locals and stack
    mp_code_state_t *code_state = NULL;
    #if MICROPY_ENABLE_PYSTACK
    code_state = mp_pystack_alloc(sizeof(mp_code_state_t) + state_size);
    #else
    if (state_size > VM_MAX_STATE_ON_STACK) {
        // state too large for the C stack: try the heap first (may fail)
        code_state = m_new_obj_var_maybe(mp_code_state_t, byte, state_size);
        #if MICROPY_DEBUG_VM_STACK_OVERFLOW
        if (code_state != NULL) {
            // zero the state so the overflow detector below can find a
            // remaining MP_OBJ_NULL sentinel slot
            memset(code_state->state, 0, state_size);
        }
        #endif
    }
    if (code_state == NULL) {
        // small state, or heap allocation failed: allocate on the C stack
        code_state = alloca(sizeof(mp_code_state_t) + state_size);
        #if MICROPY_DEBUG_VM_STACK_OVERFLOW
        memset(code_state->state, 0, state_size);
        #endif
        state_size = 0; // indicate that we allocated using alloca
    }
    #endif

    INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args);

    // execute the byte code with the correct globals context
    mp_globals_set(self->globals);
    mp_vm_return_kind_t vm_return_kind = mp_execute_bytecode(code_state, MP_OBJ_NULL);
    mp_globals_set(code_state->old_globals);

    #if MICROPY_DEBUG_VM_STACK_OVERFLOW
    // Debug-build sanity checks that the VM did not under- or over-run the
    // state array allocated above.
    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        if (code_state->sp < code_state->state) {
            mp_printf(MICROPY_DEBUG_PRINTER, "VM stack underflow: " INT_FMT "\n", code_state->sp - code_state->state);
            assert(0);
        }
    }
    // Re-decode the prelude to learn how many slots hold arguments.
    const byte *bytecode_ptr = self->bytecode;
    size_t n_state_unused, n_exc_stack_unused, scope_flags_unused;
    size_t n_pos_args, n_kwonly_args, n_def_args_unused;
    MP_BC_PRELUDE_SIG_DECODE_INTO(bytecode_ptr, n_state_unused, n_exc_stack_unused,
        scope_flags_unused, n_pos_args, n_kwonly_args, n_def_args_unused);
    // We can't check the case when an exception is returned in state[0]
    // and there are no arguments, because in this case our detection slot may have
    // been overwritten by the returned exception (which is allowed).
    if (!(vm_return_kind == MP_VM_RETURN_EXCEPTION && n_pos_args + n_kwonly_args == 0)) {
        // Just check to see that we have at least 1 null object left in the state.
        bool overflow = true;
        for (size_t i = 0; i < n_state - n_pos_args - n_kwonly_args; ++i) {
            if (code_state->state[i] == MP_OBJ_NULL) {
                overflow = false;
                break;
            }
        }
        if (overflow) {
            mp_printf(MICROPY_DEBUG_PRINTER, "VM stack overflow state=%p n_state+1=" UINT_FMT "\n", code_state->state, n_state);
            assert(0);
        }
    }
    #endif

    mp_obj_t result;
    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        // return value is in *sp
        result = *code_state->sp;
    } else {
        // must be an exception because normal functions can't yield
        assert(vm_return_kind == MP_VM_RETURN_EXCEPTION);
        // returned exception is in state[0]
        result = code_state->state[0];
    }

    #if MICROPY_ENABLE_PYSTACK
    mp_pystack_free(code_state);
    #else
    // free the state if it was allocated on the heap
    // (state_size == 0 marks the alloca case; nothing to free then)
    if (state_size != 0) {
        m_del_var(mp_code_state_t, byte, state_size, code_state);
    }
    #endif

    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        return result;
    } else { // MP_VM_RETURN_EXCEPTION
        nlr_raise(result);
    }
}
  300. #if MICROPY_PY_FUNCTION_ATTRS
  301. void mp_obj_fun_bc_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
  302. if (dest[0] != MP_OBJ_NULL) {
  303. // not load attribute
  304. return;
  305. }
  306. if (attr == MP_QSTR___name__) {
  307. dest[0] = MP_OBJ_NEW_QSTR(mp_obj_fun_get_name(self_in));
  308. }
  309. }
  310. #endif
// Type object for bytecode functions.
const mp_obj_type_t mp_type_fun_bc = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    #if MICROPY_CPYTHON_COMPAT
    .print = fun_bc_print,
    #endif
    .call = fun_bc_call,
    .unary_op = mp_generic_unary_op,
    #if MICROPY_PY_FUNCTION_ATTRS
    .attr = mp_obj_fun_bc_attr,
    #endif
};
  324. mp_obj_t mp_obj_new_fun_bc(mp_obj_t def_args_in, mp_obj_t def_kw_args, const byte *code, const mp_uint_t *const_table) {
  325. size_t n_def_args = 0;
  326. size_t n_extra_args = 0;
  327. mp_obj_tuple_t *def_args = MP_OBJ_TO_PTR(def_args_in);
  328. if (def_args_in != MP_OBJ_NULL) {
  329. assert(mp_obj_is_type(def_args_in, &mp_type_tuple));
  330. n_def_args = def_args->len;
  331. n_extra_args = def_args->len;
  332. }
  333. if (def_kw_args != MP_OBJ_NULL) {
  334. n_extra_args += 1;
  335. }
  336. mp_obj_fun_bc_t *o = m_new_obj_var(mp_obj_fun_bc_t, mp_obj_t, n_extra_args);
  337. o->base.type = &mp_type_fun_bc;
  338. o->globals = mp_globals_get();
  339. o->bytecode = code;
  340. o->const_table = const_table;
  341. if (def_args != NULL) {
  342. memcpy(o->extra_args, def_args->items, n_def_args * sizeof(mp_obj_t));
  343. }
  344. if (def_kw_args != MP_OBJ_NULL) {
  345. o->extra_args[n_def_args] = def_kw_args;
  346. }
  347. return MP_OBJ_FROM_PTR(o);
  348. }
  349. /******************************************************************************/
  350. /* native functions */
  351. #if MICROPY_EMIT_NATIVE
  352. STATIC mp_obj_t fun_native_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  353. MP_STACK_CHECK();
  354. mp_obj_fun_bc_t *self = self_in;
  355. mp_call_fun_t fun = MICROPY_MAKE_POINTER_CALLABLE((void *)self->bytecode);
  356. return fun(self_in, n_args, n_kw, args);
  357. }
// Type object for native functions.  Instances reuse the mp_obj_fun_bc_t
// layout (see mp_obj_new_fun_native), with bytecode pointing at machine code.
STATIC const mp_obj_type_t mp_type_fun_native = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    .call = fun_native_call,
    .unary_op = mp_generic_unary_op,
};
  365. mp_obj_t mp_obj_new_fun_native(mp_obj_t def_args_in, mp_obj_t def_kw_args, const void *fun_data, const mp_uint_t *const_table) {
  366. mp_obj_fun_bc_t *o = mp_obj_new_fun_bc(def_args_in, def_kw_args, (const byte *)fun_data, const_table);
  367. o->base.type = &mp_type_fun_native;
  368. return o;
  369. }
  370. #endif // MICROPY_EMIT_NATIVE
  371. /******************************************************************************/
  372. /* inline assembler functions */
#if MICROPY_EMIT_INLINE_ASM

// An inline-assembler function object.
typedef struct _mp_obj_fun_asm_t {
    mp_obj_base_t base;
    size_t n_args;          // exact number of arguments the asm code takes (at most 4)
    const void *fun_data;   // GC must be able to trace this pointer
    mp_uint_t type_sig;     // passed to mp_native_to_obj to convert the return value
} mp_obj_fun_asm_t;

// Signatures for calling into the assembled code with 0..4 machine-word args.
typedef mp_uint_t (*inline_asm_fun_0_t)(void);
typedef mp_uint_t (*inline_asm_fun_1_t)(mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_2_t)(mp_uint_t, mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_3_t)(mp_uint_t, mp_uint_t, mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_4_t)(mp_uint_t, mp_uint_t, mp_uint_t, mp_uint_t);
  385. // convert a MicroPython object to a sensible value for inline asm
  386. STATIC mp_uint_t convert_obj_for_inline_asm(mp_obj_t obj) {
  387. // TODO for byte_array, pass pointer to the array
  388. if (mp_obj_is_small_int(obj)) {
  389. return MP_OBJ_SMALL_INT_VALUE(obj);
  390. } else if (obj == mp_const_none) {
  391. return 0;
  392. } else if (obj == mp_const_false) {
  393. return 0;
  394. } else if (obj == mp_const_true) {
  395. return 1;
  396. } else if (mp_obj_is_type(obj, &mp_type_int)) {
  397. return mp_obj_int_get_truncated(obj);
  398. } else if (mp_obj_is_str(obj)) {
  399. // pointer to the string (it's probably constant though!)
  400. size_t l;
  401. return (mp_uint_t)mp_obj_str_get_data(obj, &l);
  402. } else {
  403. const mp_obj_type_t *type = mp_obj_get_type(obj);
  404. #if MICROPY_PY_BUILTINS_FLOAT
  405. if (type == &mp_type_float) {
  406. // convert float to int (could also pass in float registers)
  407. return (mp_int_t)mp_obj_float_get(obj);
  408. }
  409. #endif
  410. if (type == &mp_type_tuple || type == &mp_type_list) {
  411. // pointer to start of tuple (could pass length, but then could use len(x) for that)
  412. size_t len;
  413. mp_obj_t *items;
  414. mp_obj_get_array(obj, &len, &items);
  415. return (mp_uint_t)items;
  416. } else {
  417. mp_buffer_info_t bufinfo;
  418. if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_READ)) {
  419. // supports the buffer protocol, return a pointer to the data
  420. return (mp_uint_t)bufinfo.buf;
  421. } else {
  422. // just pass along a pointer to the object
  423. return (mp_uint_t)obj;
  424. }
  425. }
  426. }
  427. }
  428. STATIC mp_obj_t fun_asm_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
  429. mp_obj_fun_asm_t *self = self_in;
  430. mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
  431. const void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data);
  432. mp_uint_t ret;
  433. if (n_args == 0) {
  434. ret = ((inline_asm_fun_0_t)fun)();
  435. } else if (n_args == 1) {
  436. ret = ((inline_asm_fun_1_t)fun)(convert_obj_for_inline_asm(args[0]));
  437. } else if (n_args == 2) {
  438. ret = ((inline_asm_fun_2_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]));
  439. } else if (n_args == 3) {
  440. ret = ((inline_asm_fun_3_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]), convert_obj_for_inline_asm(args[2]));
  441. } else {
  442. // compiler allows at most 4 arguments
  443. assert(n_args == 4);
  444. ret = ((inline_asm_fun_4_t)fun)(
  445. convert_obj_for_inline_asm(args[0]),
  446. convert_obj_for_inline_asm(args[1]),
  447. convert_obj_for_inline_asm(args[2]),
  448. convert_obj_for_inline_asm(args[3])
  449. );
  450. }
  451. return mp_native_to_obj(ret, self->type_sig);
  452. }
// Type object for inline-assembler functions.
STATIC const mp_obj_type_t mp_type_fun_asm = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    .call = fun_asm_call,
    .unary_op = mp_generic_unary_op,
};
  460. mp_obj_t mp_obj_new_fun_asm(size_t n_args, const void *fun_data, mp_uint_t type_sig) {
  461. mp_obj_fun_asm_t *o = m_new_obj(mp_obj_fun_asm_t);
  462. o->base.type = &mp_type_fun_asm;
  463. o->n_args = n_args;
  464. o->fun_data = fun_data;
  465. o->type_sig = type_sig;
  466. return o;
  467. }
  468. #endif // MICROPY_EMIT_INLINE_ASM