/* vm.c — MicroPython bytecode virtual machine.
 * (Removed extraction artifacts: source-viewer title line and concatenated
 * line-number gutter that were not part of the original file.) */
  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013, 2014 Damien P. George
  7. * Copyright (c) 2014 Paul Sokolovsky
  8. *
  9. * Permission is hereby granted, free of charge, to any person obtaining a copy
  10. * of this software and associated documentation files (the "Software"), to deal
  11. * in the Software without restriction, including without limitation the rights
  12. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. * copies of the Software, and to permit persons to whom the Software is
  14. * furnished to do so, subject to the following conditions:
  15. *
  16. * The above copyright notice and this permission notice shall be included in
  17. * all copies or substantial portions of the Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  22. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  23. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  24. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  25. * THE SOFTWARE.
  26. */
  27. #include <stdio.h>
  28. #include <string.h>
  29. #include <assert.h>
  30. #include "py/emitglue.h"
  31. #include "py/objtype.h"
  32. #include "py/runtime.h"
  33. #include "py/bc0.h"
  34. #include "py/bc.h"
  35. #if 0
  36. #define TRACE(ip) printf("sp=%d ", (int)(sp - &code_state->state[0] + 1)); mp_bytecode_print2(ip, 1, code_state->fun_bc->const_table);
  37. #else
  38. #define TRACE(ip)
  39. #endif
  40. // Value stack grows up (this makes it incompatible with native C stack, but
  41. // makes sure that arguments to functions are in natural order arg1..argN
  42. // (Python semantics mandates left-to-right evaluation order, including for
  43. // function arguments). Stack pointer is pre-incremented and points at the
  44. // top element.
  45. // Exception stack also grows up, top element is also pointed at.
  46. // Exception stack unwind reasons (WHY_* in CPython-speak)
  47. // TODO perhaps compress this to RETURN=0, JUMP>0, with number of unwinds
  48. // left to do encoded in the JUMP number
  49. typedef enum {
  50. UNWIND_RETURN = 1, // unwinding because of a `return` statement; value 1 so it is distinguishable from None/exceptions when pushed on the value stack as a small int (see MP_BC_WITH_CLEANUP / MP_BC_END_FINALLY)
  51. UNWIND_JUMP, // unwinding because of a jump (break/continue) out of a try/with block; pushed as a small-int sentinel by MP_BC_UNWIND_JUMP
  52. } mp_unwind_reason_t;
  53. #define DECODE_UINT \
  54. mp_uint_t unum = 0; \
  55. do { \
  56. unum = (unum << 7) + (*ip & 0x7f); \
  57. } while ((*ip++ & 0x80) != 0)
  58. #define DECODE_ULABEL size_t ulab = (ip[0] | (ip[1] << 8)); ip += 2
  59. #define DECODE_SLABEL size_t slab = (ip[0] | (ip[1] << 8)) - 0x8000; ip += 2
  60. #if MICROPY_PERSISTENT_CODE
  61. #define DECODE_QSTR \
  62. qstr qst = ip[0] | ip[1] << 8; \
  63. ip += 2;
  64. #define DECODE_PTR \
  65. DECODE_UINT; \
  66. void *ptr = (void*)(uintptr_t)code_state->fun_bc->const_table[unum]
  67. #define DECODE_OBJ \
  68. DECODE_UINT; \
  69. mp_obj_t obj = (mp_obj_t)code_state->fun_bc->const_table[unum]
  70. #else
  71. #define DECODE_QSTR qstr qst = 0; \
  72. do { \
  73. qst = (qst << 7) + (*ip & 0x7f); \
  74. } while ((*ip++ & 0x80) != 0)
  75. #define DECODE_PTR \
  76. ip = (byte*)MP_ALIGN(ip, sizeof(void*)); \
  77. void *ptr = *(void**)ip; \
  78. ip += sizeof(void*)
  79. #define DECODE_OBJ \
  80. ip = (byte*)MP_ALIGN(ip, sizeof(mp_obj_t)); \
  81. mp_obj_t obj = *(mp_obj_t*)ip; \
  82. ip += sizeof(mp_obj_t)
  83. #endif
  84. #define PUSH(val) *++sp = (val)
  85. #define POP() (*sp--)
  86. #define TOP() (*sp)
  87. #define SET_TOP(val) *sp = (val)
  88. #if MICROPY_PY_SYS_EXC_INFO
  89. #define CLEAR_SYS_EXC_INFO() MP_STATE_VM(cur_exception) = NULL;
  90. #else
  91. #define CLEAR_SYS_EXC_INFO()
  92. #endif
  93. #define PUSH_EXC_BLOCK(with_or_finally) do { \
  94. DECODE_ULABEL; /* except labels are always forward */ \
  95. ++exc_sp; \
  96. exc_sp->handler = ip + ulab; \
  97. exc_sp->val_sp = MP_TAGPTR_MAKE(sp, ((with_or_finally) << 1) | currently_in_except_block); \
  98. exc_sp->prev_exc = NULL; \
  99. currently_in_except_block = 0; /* in a try block now */ \
  100. } while (0)
  101. #define POP_EXC_BLOCK() \
  102. currently_in_except_block = MP_TAGPTR_TAG0(exc_sp->val_sp); /* restore previous state */ \
  103. exc_sp--; /* pop back to previous exception handler */ \
  104. CLEAR_SYS_EXC_INFO() /* just clear sys.exc_info(), not compliant, but it shouldn't be used in 1st place */
  105. // fastn has items in reverse order (fastn[0] is local[0], fastn[-1] is local[1], etc)
  106. // sp points to bottom of stack which grows up
  107. // returns:
  108. // MP_VM_RETURN_NORMAL, sp valid, return value in *sp
  109. // MP_VM_RETURN_YIELD, ip, sp valid, yielded value in *sp
  110. // MP_VM_RETURN_EXCEPTION, exception in fastn[0]
  111. mp_vm_return_kind_t mp_execute_bytecode(mp_code_state_t *code_state, volatile mp_obj_t inject_exc) {
  112. #define SELECTIVE_EXC_IP (0)
  113. #if SELECTIVE_EXC_IP
  114. #define MARK_EXC_IP_SELECTIVE() { code_state->ip = ip; } /* stores ip 1 byte past last opcode */
  115. #define MARK_EXC_IP_GLOBAL()
  116. #else
  117. #define MARK_EXC_IP_SELECTIVE()
  118. #define MARK_EXC_IP_GLOBAL() { code_state->ip = ip; } /* stores ip pointing to last opcode */
  119. #endif
  120. #if MICROPY_OPT_COMPUTED_GOTO
  121. #include "py/vmentrytable.h"
  122. #define DISPATCH() do { \
  123. TRACE(ip); \
  124. MARK_EXC_IP_GLOBAL(); \
  125. goto *entry_table[*ip++]; \
  126. } while (0)
  127. #define DISPATCH_WITH_PEND_EXC_CHECK() goto pending_exception_check
  128. #define ENTRY(op) entry_##op
  129. #define ENTRY_DEFAULT entry_default
  130. #else
  131. #define DISPATCH() break
  132. #define DISPATCH_WITH_PEND_EXC_CHECK() goto pending_exception_check
  133. #define ENTRY(op) case op
  134. #define ENTRY_DEFAULT default
  135. #endif
  136. // nlr_raise needs to be implemented as a goto, so that the C compiler's flow analyser
  137. // sees that it's possible for us to jump from the dispatch loop to the exception
  138. // handler. Without this, the code may have a different stack layout in the dispatch
  139. // loop and the exception handler, leading to very obscure bugs.
  140. #define RAISE(o) do { nlr_pop(); nlr.ret_val = MP_OBJ_TO_PTR(o); goto exception_handler; } while (0)
  141. #if MICROPY_STACKLESS
  142. run_code_state: ;
  143. #endif
  144. // Pointers which are constant for particular invocation of mp_execute_bytecode()
  145. mp_obj_t * /*const*/ fastn;
  146. mp_exc_stack_t * /*const*/ exc_stack;
  147. {
  148. size_t n_state = mp_decode_uint_value(code_state->fun_bc->bytecode);
  149. fastn = &code_state->state[n_state - 1];
  150. exc_stack = (mp_exc_stack_t*)(code_state->state + n_state);
  151. }
  152. // variables that are visible to the exception handler (declared volatile)
  153. volatile bool currently_in_except_block = MP_TAGPTR_TAG0(code_state->exc_sp); // 0 or 1, to detect nested exceptions
  154. mp_exc_stack_t *volatile exc_sp = MP_TAGPTR_PTR(code_state->exc_sp); // stack grows up, exc_sp points to top of stack
  155. #if MICROPY_PY_THREAD_GIL && MICROPY_PY_THREAD_GIL_VM_DIVISOR
  156. // This needs to be volatile and outside the VM loop so it persists across handling
  157. // of any exceptions. Otherwise it's possible that the VM never gives up the GIL.
  158. volatile int gil_divisor = MICROPY_PY_THREAD_GIL_VM_DIVISOR;
  159. #endif
  160. // outer exception handling loop
  161. for (;;) {
  162. nlr_buf_t nlr;
  163. outer_dispatch_loop:
  164. if (nlr_push(&nlr) == 0) {
  165. // local variables that are not visible to the exception handler
  166. const byte *ip = code_state->ip;
  167. mp_obj_t *sp = code_state->sp;
  168. mp_obj_t obj_shared;
  169. MICROPY_VM_HOOK_INIT
  170. // If we have exception to inject, now that we finish setting up
  171. // execution context, raise it. This works as if RAISE_VARARGS
  172. // bytecode was executed.
  173. // Injecting exc into yield from generator is a special case,
  174. // handled by MP_BC_YIELD_FROM itself
  175. if (inject_exc != MP_OBJ_NULL && *ip != MP_BC_YIELD_FROM) {
  176. mp_obj_t exc = inject_exc;
  177. inject_exc = MP_OBJ_NULL;
  178. exc = mp_make_raise_obj(exc);
  179. RAISE(exc);
  180. }
  181. // loop to execute byte code
  182. for (;;) {
  183. dispatch_loop:
  184. #if MICROPY_OPT_COMPUTED_GOTO
  185. DISPATCH();
  186. #else
  187. TRACE(ip);
  188. MARK_EXC_IP_GLOBAL();
  189. switch (*ip++) {
  190. #endif
  191. ENTRY(MP_BC_LOAD_CONST_FALSE):
  192. PUSH(mp_const_false);
  193. DISPATCH();
  194. ENTRY(MP_BC_LOAD_CONST_NONE):
  195. PUSH(mp_const_none);
  196. DISPATCH();
  197. ENTRY(MP_BC_LOAD_CONST_TRUE):
  198. PUSH(mp_const_true);
  199. DISPATCH();
  200. ENTRY(MP_BC_LOAD_CONST_SMALL_INT): {
  201. mp_int_t num = 0;
  202. if ((ip[0] & 0x40) != 0) {
  203. // Number is negative
  204. num--;
  205. }
  206. do {
  207. num = (num << 7) | (*ip & 0x7f);
  208. } while ((*ip++ & 0x80) != 0);
  209. PUSH(MP_OBJ_NEW_SMALL_INT(num));
  210. DISPATCH();
  211. }
  212. ENTRY(MP_BC_LOAD_CONST_STRING): {
  213. DECODE_QSTR;
  214. PUSH(MP_OBJ_NEW_QSTR(qst));
  215. DISPATCH();
  216. }
  217. ENTRY(MP_BC_LOAD_CONST_OBJ): {
  218. DECODE_OBJ;
  219. PUSH(obj);
  220. DISPATCH();
  221. }
  222. ENTRY(MP_BC_LOAD_NULL):
  223. PUSH(MP_OBJ_NULL);
  224. DISPATCH();
  225. ENTRY(MP_BC_LOAD_FAST_N): {
  226. DECODE_UINT;
  227. obj_shared = fastn[-unum];
  228. load_check:
  229. if (obj_shared == MP_OBJ_NULL) {
  230. local_name_error: {
  231. MARK_EXC_IP_SELECTIVE();
  232. mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NameError, "local variable referenced before assignment");
  233. RAISE(obj);
  234. }
  235. }
  236. PUSH(obj_shared);
  237. DISPATCH();
  238. }
  239. ENTRY(MP_BC_LOAD_DEREF): {
  240. DECODE_UINT;
  241. obj_shared = mp_obj_cell_get(fastn[-unum]);
  242. goto load_check;
  243. }
  244. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  245. ENTRY(MP_BC_LOAD_NAME): {
  246. MARK_EXC_IP_SELECTIVE();
  247. DECODE_QSTR;
  248. PUSH(mp_load_name(qst));
  249. DISPATCH();
  250. }
  251. #else
  252. ENTRY(MP_BC_LOAD_NAME): {
  253. MARK_EXC_IP_SELECTIVE();
  254. DECODE_QSTR;
  255. mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
  256. mp_uint_t x = *ip;
  257. if (x < mp_locals_get()->map.alloc && mp_locals_get()->map.table[x].key == key) {
  258. PUSH(mp_locals_get()->map.table[x].value);
  259. } else {
  260. mp_map_elem_t *elem = mp_map_lookup(&mp_locals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
  261. if (elem != NULL) {
  262. *(byte*)ip = (elem - &mp_locals_get()->map.table[0]) & 0xff;
  263. PUSH(elem->value);
  264. } else {
  265. PUSH(mp_load_name(MP_OBJ_QSTR_VALUE(key)));
  266. }
  267. }
  268. ip++;
  269. DISPATCH();
  270. }
  271. #endif
  272. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  273. ENTRY(MP_BC_LOAD_GLOBAL): {
  274. MARK_EXC_IP_SELECTIVE();
  275. DECODE_QSTR;
  276. PUSH(mp_load_global(qst));
  277. DISPATCH();
  278. }
  279. #else
  280. ENTRY(MP_BC_LOAD_GLOBAL): {
  281. MARK_EXC_IP_SELECTIVE();
  282. DECODE_QSTR;
  283. mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
  284. mp_uint_t x = *ip;
  285. if (x < mp_globals_get()->map.alloc && mp_globals_get()->map.table[x].key == key) {
  286. PUSH(mp_globals_get()->map.table[x].value);
  287. } else {
  288. mp_map_elem_t *elem = mp_map_lookup(&mp_globals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
  289. if (elem != NULL) {
  290. *(byte*)ip = (elem - &mp_globals_get()->map.table[0]) & 0xff;
  291. PUSH(elem->value);
  292. } else {
  293. PUSH(mp_load_global(MP_OBJ_QSTR_VALUE(key)));
  294. }
  295. }
  296. ip++;
  297. DISPATCH();
  298. }
  299. #endif
  300. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  301. ENTRY(MP_BC_LOAD_ATTR): {
  302. MARK_EXC_IP_SELECTIVE();
  303. DECODE_QSTR;
  304. SET_TOP(mp_load_attr(TOP(), qst));
  305. DISPATCH();
  306. }
  307. #else
  308. ENTRY(MP_BC_LOAD_ATTR): {
  309. MARK_EXC_IP_SELECTIVE();
  310. DECODE_QSTR;
  311. mp_obj_t top = TOP();
  312. if (mp_obj_get_type(top)->attr == mp_obj_instance_attr) {
  313. mp_obj_instance_t *self = MP_OBJ_TO_PTR(top);
  314. mp_uint_t x = *ip;
  315. mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
  316. mp_map_elem_t *elem;
  317. if (x < self->members.alloc && self->members.table[x].key == key) {
  318. elem = &self->members.table[x];
  319. } else {
  320. elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP);
  321. if (elem != NULL) {
  322. *(byte*)ip = elem - &self->members.table[0];
  323. } else {
  324. goto load_attr_cache_fail;
  325. }
  326. }
  327. SET_TOP(elem->value);
  328. ip++;
  329. DISPATCH();
  330. }
  331. load_attr_cache_fail:
  332. SET_TOP(mp_load_attr(top, qst));
  333. ip++;
  334. DISPATCH();
  335. }
  336. #endif
  337. ENTRY(MP_BC_LOAD_METHOD): {
  338. MARK_EXC_IP_SELECTIVE();
  339. DECODE_QSTR;
  340. mp_load_method(*sp, qst, sp);
  341. sp += 1;
  342. DISPATCH();
  343. }
  344. ENTRY(MP_BC_LOAD_SUPER_METHOD): {
  345. MARK_EXC_IP_SELECTIVE();
  346. DECODE_QSTR;
  347. sp -= 1;
  348. mp_load_super_method(qst, sp - 1);
  349. DISPATCH();
  350. }
  351. ENTRY(MP_BC_LOAD_BUILD_CLASS):
  352. MARK_EXC_IP_SELECTIVE();
  353. PUSH(mp_load_build_class());
  354. DISPATCH();
  355. ENTRY(MP_BC_LOAD_SUBSCR): {
  356. MARK_EXC_IP_SELECTIVE();
  357. mp_obj_t index = POP();
  358. SET_TOP(mp_obj_subscr(TOP(), index, MP_OBJ_SENTINEL));
  359. DISPATCH();
  360. }
  361. ENTRY(MP_BC_STORE_FAST_N): {
  362. DECODE_UINT;
  363. fastn[-unum] = POP();
  364. DISPATCH();
  365. }
  366. ENTRY(MP_BC_STORE_DEREF): {
  367. DECODE_UINT;
  368. mp_obj_cell_set(fastn[-unum], POP());
  369. DISPATCH();
  370. }
  371. ENTRY(MP_BC_STORE_NAME): {
  372. MARK_EXC_IP_SELECTIVE();
  373. DECODE_QSTR;
  374. mp_store_name(qst, POP());
  375. DISPATCH();
  376. }
  377. ENTRY(MP_BC_STORE_GLOBAL): {
  378. MARK_EXC_IP_SELECTIVE();
  379. DECODE_QSTR;
  380. mp_store_global(qst, POP());
  381. DISPATCH();
  382. }
  383. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  384. ENTRY(MP_BC_STORE_ATTR): {
  385. MARK_EXC_IP_SELECTIVE();
  386. DECODE_QSTR;
  387. mp_store_attr(sp[0], qst, sp[-1]);
  388. sp -= 2;
  389. DISPATCH();
  390. }
  391. #else
  392. // This caching code works with MICROPY_PY_BUILTINS_PROPERTY and/or
  393. // MICROPY_PY_DESCRIPTORS enabled because if the attr exists in
  394. // self->members then it can't be a property or have descriptors. A
  395. // consequence of this is that we can't use MP_MAP_LOOKUP_ADD_IF_NOT_FOUND
  396. // in the fast-path below, because that store could override a property.
  397. ENTRY(MP_BC_STORE_ATTR): {
  398. MARK_EXC_IP_SELECTIVE();
  399. DECODE_QSTR;
  400. mp_obj_t top = TOP();
  401. if (mp_obj_get_type(top)->attr == mp_obj_instance_attr && sp[-1] != MP_OBJ_NULL) {
  402. mp_obj_instance_t *self = MP_OBJ_TO_PTR(top);
  403. mp_uint_t x = *ip;
  404. mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
  405. mp_map_elem_t *elem;
  406. if (x < self->members.alloc && self->members.table[x].key == key) {
  407. elem = &self->members.table[x];
  408. } else {
  409. elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP);
  410. if (elem != NULL) {
  411. *(byte*)ip = elem - &self->members.table[0];
  412. } else {
  413. goto store_attr_cache_fail;
  414. }
  415. }
  416. elem->value = sp[-1];
  417. sp -= 2;
  418. ip++;
  419. DISPATCH();
  420. }
  421. store_attr_cache_fail:
  422. mp_store_attr(sp[0], qst, sp[-1]);
  423. sp -= 2;
  424. ip++;
  425. DISPATCH();
  426. }
  427. #endif
  428. ENTRY(MP_BC_STORE_SUBSCR):
  429. MARK_EXC_IP_SELECTIVE();
  430. mp_obj_subscr(sp[-1], sp[0], sp[-2]);
  431. sp -= 3;
  432. DISPATCH();
  433. ENTRY(MP_BC_DELETE_FAST): {
  434. MARK_EXC_IP_SELECTIVE();
  435. DECODE_UINT;
  436. if (fastn[-unum] == MP_OBJ_NULL) {
  437. goto local_name_error;
  438. }
  439. fastn[-unum] = MP_OBJ_NULL;
  440. DISPATCH();
  441. }
  442. ENTRY(MP_BC_DELETE_DEREF): {
  443. MARK_EXC_IP_SELECTIVE();
  444. DECODE_UINT;
  445. if (mp_obj_cell_get(fastn[-unum]) == MP_OBJ_NULL) {
  446. goto local_name_error;
  447. }
  448. mp_obj_cell_set(fastn[-unum], MP_OBJ_NULL);
  449. DISPATCH();
  450. }
  451. ENTRY(MP_BC_DELETE_NAME): {
  452. MARK_EXC_IP_SELECTIVE();
  453. DECODE_QSTR;
  454. mp_delete_name(qst);
  455. DISPATCH();
  456. }
  457. ENTRY(MP_BC_DELETE_GLOBAL): {
  458. MARK_EXC_IP_SELECTIVE();
  459. DECODE_QSTR;
  460. mp_delete_global(qst);
  461. DISPATCH();
  462. }
  463. ENTRY(MP_BC_DUP_TOP): {
  464. mp_obj_t top = TOP();
  465. PUSH(top);
  466. DISPATCH();
  467. }
  468. ENTRY(MP_BC_DUP_TOP_TWO):
  469. sp += 2;
  470. sp[0] = sp[-2];
  471. sp[-1] = sp[-3];
  472. DISPATCH();
  473. ENTRY(MP_BC_POP_TOP):
  474. sp -= 1;
  475. DISPATCH();
  476. ENTRY(MP_BC_ROT_TWO): {
  477. mp_obj_t top = sp[0];
  478. sp[0] = sp[-1];
  479. sp[-1] = top;
  480. DISPATCH();
  481. }
  482. ENTRY(MP_BC_ROT_THREE): {
  483. mp_obj_t top = sp[0];
  484. sp[0] = sp[-1];
  485. sp[-1] = sp[-2];
  486. sp[-2] = top;
  487. DISPATCH();
  488. }
  489. ENTRY(MP_BC_JUMP): {
  490. DECODE_SLABEL;
  491. ip += slab;
  492. DISPATCH_WITH_PEND_EXC_CHECK();
  493. }
  494. ENTRY(MP_BC_POP_JUMP_IF_TRUE): {
  495. DECODE_SLABEL;
  496. if (mp_obj_is_true(POP())) {
  497. ip += slab;
  498. }
  499. DISPATCH_WITH_PEND_EXC_CHECK();
  500. }
  501. ENTRY(MP_BC_POP_JUMP_IF_FALSE): {
  502. DECODE_SLABEL;
  503. if (!mp_obj_is_true(POP())) {
  504. ip += slab;
  505. }
  506. DISPATCH_WITH_PEND_EXC_CHECK();
  507. }
  508. ENTRY(MP_BC_JUMP_IF_TRUE_OR_POP): {
  509. DECODE_SLABEL;
  510. if (mp_obj_is_true(TOP())) {
  511. ip += slab;
  512. } else {
  513. sp--;
  514. }
  515. DISPATCH_WITH_PEND_EXC_CHECK();
  516. }
  517. ENTRY(MP_BC_JUMP_IF_FALSE_OR_POP): {
  518. DECODE_SLABEL;
  519. if (mp_obj_is_true(TOP())) {
  520. sp--;
  521. } else {
  522. ip += slab;
  523. }
  524. DISPATCH_WITH_PEND_EXC_CHECK();
  525. }
  526. ENTRY(MP_BC_SETUP_WITH): {
  527. MARK_EXC_IP_SELECTIVE();
  528. // stack: (..., ctx_mgr)
  529. mp_obj_t obj = TOP();
  530. mp_load_method(obj, MP_QSTR___exit__, sp);
  531. mp_load_method(obj, MP_QSTR___enter__, sp + 2);
  532. mp_obj_t ret = mp_call_method_n_kw(0, 0, sp + 2);
  533. sp += 1;
  534. PUSH_EXC_BLOCK(1);
  535. PUSH(ret);
  536. // stack: (..., __exit__, ctx_mgr, as_value)
  537. DISPATCH();
  538. }
  539. ENTRY(MP_BC_WITH_CLEANUP): {
  540. MARK_EXC_IP_SELECTIVE();
  541. // Arriving here, there's "exception control block" on top of stack,
  542. // and __exit__ method (with self) underneath it. Bytecode calls __exit__,
  543. // and "deletes" it off stack, shifting "exception control block"
  544. // to its place.
  545. // The bytecode emitter ensures that there is enough space on the Python
  546. // value stack to hold the __exit__ method plus an additional 4 entries.
  547. if (TOP() == mp_const_none) {
  548. // stack: (..., __exit__, ctx_mgr, None)
  549. sp[1] = mp_const_none;
  550. sp[2] = mp_const_none;
  551. sp -= 2;
  552. mp_call_method_n_kw(3, 0, sp);
  553. SET_TOP(mp_const_none);
  554. } else if (MP_OBJ_IS_SMALL_INT(TOP())) {
  555. mp_int_t cause_val = MP_OBJ_SMALL_INT_VALUE(TOP());
  556. if (cause_val == UNWIND_RETURN) {
  557. // stack: (..., __exit__, ctx_mgr, ret_val, UNWIND_RETURN)
  558. mp_obj_t ret_val = sp[-1];
  559. sp[-1] = mp_const_none;
  560. sp[0] = mp_const_none;
  561. sp[1] = mp_const_none;
  562. mp_call_method_n_kw(3, 0, sp - 3);
  563. sp[-3] = ret_val;
  564. sp[-2] = MP_OBJ_NEW_SMALL_INT(UNWIND_RETURN);
  565. } else {
  566. assert(cause_val == UNWIND_JUMP);
  567. // stack: (..., __exit__, ctx_mgr, dest_ip, num_exc, UNWIND_JUMP)
  568. mp_obj_t dest_ip = sp[-2];
  569. mp_obj_t num_exc = sp[-1];
  570. sp[-2] = mp_const_none;
  571. sp[-1] = mp_const_none;
  572. sp[0] = mp_const_none;
  573. mp_call_method_n_kw(3, 0, sp - 4);
  574. sp[-4] = dest_ip;
  575. sp[-3] = num_exc;
  576. sp[-2] = MP_OBJ_NEW_SMALL_INT(UNWIND_JUMP);
  577. }
  578. sp -= 2; // we removed (__exit__, ctx_mgr)
  579. } else {
  580. assert(mp_obj_is_exception_instance(TOP()));
  581. // stack: (..., __exit__, ctx_mgr, exc_instance)
  582. // Need to pass (exc_type, exc_instance, None) as arguments to __exit__.
  583. sp[1] = sp[0];
  584. sp[0] = MP_OBJ_FROM_PTR(mp_obj_get_type(sp[0]));
  585. sp[2] = mp_const_none;
  586. sp -= 2;
  587. mp_obj_t ret_value = mp_call_method_n_kw(3, 0, sp);
  588. if (mp_obj_is_true(ret_value)) {
  589. // We need to silence/swallow the exception. This is done
  590. // by popping the exception and the __exit__ handler and
  591. // replacing it with None, which signals END_FINALLY to just
  592. // execute the finally handler normally.
  593. SET_TOP(mp_const_none);
  594. assert(exc_sp >= exc_stack);
  595. POP_EXC_BLOCK();
  596. } else {
  597. // We need to re-raise the exception. We pop __exit__ handler
  598. // by copying the exception instance down to the new top-of-stack.
  599. sp[0] = sp[3];
  600. }
  601. }
  602. DISPATCH();
  603. }
  604. ENTRY(MP_BC_UNWIND_JUMP): {
  605. MARK_EXC_IP_SELECTIVE();
  606. DECODE_SLABEL;
  607. PUSH((mp_obj_t)(mp_uint_t)(uintptr_t)(ip + slab)); // push destination ip for jump
  608. PUSH((mp_obj_t)(mp_uint_t)(*ip)); // push number of exception handlers to unwind (0x80 bit set if we also need to pop stack)
  609. unwind_jump:;
  610. mp_uint_t unum = (mp_uint_t)POP(); // get number of exception handlers to unwind
  611. while ((unum & 0x7f) > 0) {
  612. unum -= 1;
  613. assert(exc_sp >= exc_stack);
  614. if (MP_TAGPTR_TAG1(exc_sp->val_sp)) {
  615. // Getting here the stack looks like:
  616. // (..., X, dest_ip)
  617. // where X is pointed to by exc_sp->val_sp and in the case
  618. // of a "with" block contains the context manager info.
  619. // We're going to run "finally" code as a coroutine
  620. // (not calling it recursively). Set up a sentinel
  621. // on a stack so it can return back to us when it is
  622. // done (when WITH_CLEANUP or END_FINALLY reached).
  623. PUSH((mp_obj_t)unum); // push number of exception handlers left to unwind
  624. PUSH(MP_OBJ_NEW_SMALL_INT(UNWIND_JUMP)); // push sentinel
  625. ip = exc_sp->handler; // get exception handler byte code address
  626. exc_sp--; // pop exception handler
  627. goto dispatch_loop; // run the exception handler
  628. }
  629. POP_EXC_BLOCK();
  630. }
  631. ip = (const byte*)MP_OBJ_TO_PTR(POP()); // pop destination ip for jump
  632. if (unum != 0) {
  633. // pop the exhausted iterator
  634. sp -= MP_OBJ_ITER_BUF_NSLOTS;
  635. }
  636. DISPATCH_WITH_PEND_EXC_CHECK();
  637. }
  638. // matched against: POP_BLOCK or POP_EXCEPT (anything else?)
  639. ENTRY(MP_BC_SETUP_EXCEPT):
  640. ENTRY(MP_BC_SETUP_FINALLY): {
  641. MARK_EXC_IP_SELECTIVE();
  642. #if SELECTIVE_EXC_IP
  643. PUSH_EXC_BLOCK((code_state->ip[-1] == MP_BC_SETUP_FINALLY) ? 1 : 0);
  644. #else
  645. PUSH_EXC_BLOCK((code_state->ip[0] == MP_BC_SETUP_FINALLY) ? 1 : 0);
  646. #endif
  647. DISPATCH();
  648. }
  649. ENTRY(MP_BC_END_FINALLY):
  650. MARK_EXC_IP_SELECTIVE();
  651. // if TOS is None, just pops it and continues
  652. // if TOS is an integer, finishes coroutine and returns control to caller
  653. // if TOS is an exception, reraises the exception
  654. if (TOP() == mp_const_none) {
  655. sp--;
  656. } else if (MP_OBJ_IS_SMALL_INT(TOP())) {
  657. // We finished "finally" coroutine and now dispatch back
  658. // to our caller, based on TOS value
  659. mp_unwind_reason_t reason = MP_OBJ_SMALL_INT_VALUE(POP());
  660. if (reason == UNWIND_RETURN) {
  661. goto unwind_return;
  662. } else {
  663. assert(reason == UNWIND_JUMP);
  664. goto unwind_jump;
  665. }
  666. } else {
  667. assert(mp_obj_is_exception_instance(TOP()));
  668. RAISE(TOP());
  669. }
  670. DISPATCH();
  671. ENTRY(MP_BC_GET_ITER):
  672. MARK_EXC_IP_SELECTIVE();
  673. SET_TOP(mp_getiter(TOP(), NULL));
  674. DISPATCH();
  675. // An iterator for a for-loop takes MP_OBJ_ITER_BUF_NSLOTS slots on
  676. // the Python value stack. These slots are either used to store the
  677. // iterator object itself, or the first slot is MP_OBJ_NULL and
  678. // the second slot holds a reference to the iterator object.
  679. ENTRY(MP_BC_GET_ITER_STACK): {
  680. MARK_EXC_IP_SELECTIVE();
  681. mp_obj_t obj = TOP();
  682. mp_obj_iter_buf_t *iter_buf = (mp_obj_iter_buf_t*)sp;
  683. sp += MP_OBJ_ITER_BUF_NSLOTS - 1;
  684. obj = mp_getiter(obj, iter_buf);
  685. if (obj != MP_OBJ_FROM_PTR(iter_buf)) {
  686. // Iterator didn't use the stack so indicate that with MP_OBJ_NULL.
  687. sp[-MP_OBJ_ITER_BUF_NSLOTS + 1] = MP_OBJ_NULL;
  688. sp[-MP_OBJ_ITER_BUF_NSLOTS + 2] = obj;
  689. }
  690. DISPATCH();
  691. }
  692. ENTRY(MP_BC_FOR_ITER): {
  693. MARK_EXC_IP_SELECTIVE();
  694. DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward
  695. code_state->sp = sp;
  696. mp_obj_t obj;
  697. if (sp[-MP_OBJ_ITER_BUF_NSLOTS + 1] == MP_OBJ_NULL) {
  698. obj = sp[-MP_OBJ_ITER_BUF_NSLOTS + 2];
  699. } else {
  700. obj = MP_OBJ_FROM_PTR(&sp[-MP_OBJ_ITER_BUF_NSLOTS + 1]);
  701. }
  702. mp_obj_t value = mp_iternext_allow_raise(obj);
  703. if (value == MP_OBJ_STOP_ITERATION) {
  704. sp -= MP_OBJ_ITER_BUF_NSLOTS; // pop the exhausted iterator
  705. ip += ulab; // jump to after for-block
  706. } else {
  707. PUSH(value); // push the next iteration value
  708. }
  709. DISPATCH();
  710. }
  711. // matched against: SETUP_EXCEPT, SETUP_FINALLY, SETUP_WITH
  712. ENTRY(MP_BC_POP_BLOCK):
  713. // we are exiting an exception handler, so pop the last one of the exception-stack
  714. assert(exc_sp >= exc_stack);
  715. POP_EXC_BLOCK();
  716. DISPATCH();
  717. // matched against: SETUP_EXCEPT
  718. ENTRY(MP_BC_POP_EXCEPT):
  719. assert(exc_sp >= exc_stack);
  720. assert(currently_in_except_block);
  721. POP_EXC_BLOCK();
  722. DISPATCH();
  723. ENTRY(MP_BC_BUILD_TUPLE): {
  724. MARK_EXC_IP_SELECTIVE();
  725. DECODE_UINT;
  726. sp -= unum - 1;
  727. SET_TOP(mp_obj_new_tuple(unum, sp));
  728. DISPATCH();
  729. }
  730. ENTRY(MP_BC_BUILD_LIST): {
  731. MARK_EXC_IP_SELECTIVE();
  732. DECODE_UINT;
  733. sp -= unum - 1;
  734. SET_TOP(mp_obj_new_list(unum, sp));
  735. DISPATCH();
  736. }
  737. ENTRY(MP_BC_BUILD_MAP): {
  738. MARK_EXC_IP_SELECTIVE();
  739. DECODE_UINT;
  740. PUSH(mp_obj_new_dict(unum));
  741. DISPATCH();
  742. }
  743. ENTRY(MP_BC_STORE_MAP):
  744. MARK_EXC_IP_SELECTIVE();
  745. sp -= 2;
  746. mp_obj_dict_store(sp[0], sp[2], sp[1]);
  747. DISPATCH();
  748. #if MICROPY_PY_BUILTINS_SET
  749. ENTRY(MP_BC_BUILD_SET): {
  750. MARK_EXC_IP_SELECTIVE();
  751. DECODE_UINT;
  752. sp -= unum - 1;
  753. SET_TOP(mp_obj_new_set(unum, sp));
  754. DISPATCH();
  755. }
  756. #endif
#if MICROPY_PY_BUILTINS_SLICE
ENTRY(MP_BC_BUILD_SLICE): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unum is the number of slice components on the stack:
    // 2 -> (start, stop), otherwise 3 -> (start, stop, step)
    if (unum == 2) {
        mp_obj_t stop = POP();
        mp_obj_t start = TOP();
        SET_TOP(mp_obj_new_slice(start, stop, mp_const_none)); // step defaults to none
    } else {
        mp_obj_t step = POP();
        mp_obj_t stop = POP();
        mp_obj_t start = TOP();
        SET_TOP(mp_obj_new_slice(start, stop, step));
    }
    DISPATCH();
}
#endif
ENTRY(MP_BC_STORE_COMP): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // Store an item into the collection being built by a comprehension.
    // unum >> 2 is the stack offset (down from TOS) of the collection;
    // unum & 3 encodes its kind: 0 = list, 1 = dict, otherwise set.
    mp_obj_t obj = sp[-(unum >> 2)];
    if ((unum & 3) == 0) {
        // list comprehension: append TOS
        mp_obj_list_append(obj, sp[0]);
        sp--;
    } else if (!MICROPY_PY_BUILTINS_SET || (unum & 3) == 1) {
        // dict comprehension: store the two top items as a key/value pair
        mp_obj_dict_store(obj, sp[0], sp[-1]);
        sp -= 2;
    #if MICROPY_PY_BUILTINS_SET
    } else {
        // set comprehension: add TOS
        mp_obj_set_store(obj, sp[0]);
        sp--;
    #endif
    }
    DISPATCH();
}
ENTRY(MP_BC_UNPACK_SEQUENCE): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unpack the sequence at TOS into exactly unum items, written in place
    // over the stack starting at sp; net stack growth is unum - 1
    mp_unpack_sequence(sp[0], unum, sp);
    sp += unum - 1;
    DISPATCH();
}
ENTRY(MP_BC_UNPACK_EX): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // star-unpack (a, *b, c = ...); judging by the sp adjustment, the low
    // byte of unum counts targets before the starred one and the next byte
    // counts targets after it
    mp_unpack_ex(sp[0], unum, sp);
    sp += (unum & 0xff) + ((unum >> 8) & 0xff);
    DISPATCH();
}
ENTRY(MP_BC_MAKE_FUNCTION): {
    DECODE_PTR;
    // ptr is the raw code for the new function; no default args
    PUSH(mp_make_function_from_raw_code(ptr, MP_OBJ_NULL, MP_OBJ_NULL));
    DISPATCH();
}
ENTRY(MP_BC_MAKE_FUNCTION_DEFARGS): {
    DECODE_PTR;
    // Stack layout: def_tuple def_dict <- TOS
    mp_obj_t def_dict = POP();
    SET_TOP(mp_make_function_from_raw_code(ptr, TOP(), def_dict));
    DISPATCH();
}
ENTRY(MP_BC_MAKE_CLOSURE): {
    DECODE_PTR;
    // the byte following the pointer gives the number of closed-over cells
    size_t n_closed_over = *ip++;
    // Stack layout: closed_overs <- TOS
    sp -= n_closed_over - 1;
    SET_TOP(mp_make_closure_from_raw_code(ptr, n_closed_over, sp));
    DISPATCH();
}
ENTRY(MP_BC_MAKE_CLOSURE_DEFARGS): {
    DECODE_PTR;
    size_t n_closed_over = *ip++;
    // Stack layout: def_tuple def_dict closed_overs <- TOS
    sp -= 2 + n_closed_over - 1;
    // the 0x100 bit presumably flags the presence of the two default-arg
    // slots before the cells — see mp_make_closure_from_raw_code
    SET_TOP(mp_make_closure_from_raw_code(ptr, 0x100 | n_closed_over, sp));
    DISPATCH();
}
ENTRY(MP_BC_CALL_FUNCTION): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unum & 0xff == n_positional
    // (unum >> 8) & 0xff == n_keyword
    // Drop the arguments so sp lands on the callable; keyword args occupy
    // 2 slots each, and (unum >> 7) & 0x1fe computes 2*n_keyword directly.
    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe);
    #if MICROPY_STACKLESS
    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
        // Stackless call: save this frame's VM registers, build a code
        // state for the callee and continue it in this dispatch loop
        // instead of recursing on the C stack.
        code_state->ip = ip;
        code_state->sp = sp;
        code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, currently_in_except_block);
        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(*sp, unum & 0xff, (unum >> 8) & 0xff, sp + 1);
        if (new_state) {
            new_state->prev = code_state;
            code_state = new_state;
            nlr_pop();
            goto run_code_state;
        }
        // new_state == NULL (allocation failed): fall through to the normal
        // recursive call below, or raise immediately in STRICT mode
        #if MICROPY_STACKLESS_STRICT
        else {
        deep_recursion_error:
            mp_exc_recursion_depth();
        }
        #endif
    }
    #endif
    SET_TOP(mp_call_function_n_kw(*sp, unum & 0xff, (unum >> 8) & 0xff, sp + 1));
    DISPATCH();
}
ENTRY(MP_BC_CALL_FUNCTION_VAR_KW): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unum & 0xff == n_positional
    // (unum >> 8) & 0xff == n_keyword
    // We have following stack layout here:
    // fun arg0 arg1 ... kw0 val0 kw1 val1 ... seq dict <- TOS
    // the extra +2 accounts for the *seq and **dict objects on top
    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 2;
    #if MICROPY_STACKLESS
    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
        // Stackless path: flatten the var/kw arguments into a plain array,
        // then continue executing the callee in this dispatch loop.
        code_state->ip = ip;
        code_state->sp = sp;
        code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, currently_in_except_block);
        mp_call_args_t out_args;
        mp_call_prepare_args_n_kw_var(false, unum, sp, &out_args);
        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(out_args.fun,
            out_args.n_args, out_args.n_kw, out_args.args);
        // the flattened arg array was copied into the code state; free it
        m_del(mp_obj_t, out_args.args, out_args.n_alloc);
        if (new_state) {
            new_state->prev = code_state;
            code_state = new_state;
            nlr_pop();
            goto run_code_state;
        }
        #if MICROPY_STACKLESS_STRICT
        else {
            goto deep_recursion_error;
        }
        #endif
    }
    #endif
    // false == this is a plain function call, not a method call
    SET_TOP(mp_call_method_n_kw_var(false, unum, sp));
    DISPATCH();
}
ENTRY(MP_BC_CALL_METHOD): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unum & 0xff == n_positional
    // (unum >> 8) & 0xff == n_keyword
    // the extra +1 lands sp on the method object; the self slot sits at sp[1]
    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 1;
    #if MICROPY_STACKLESS
    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
        code_state->ip = ip;
        code_state->sp = sp;
        code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, currently_in_except_block);
        size_t n_args = unum & 0xff;
        size_t n_kw = (unum >> 8) & 0xff;
        // sp[1] is the self slot: MP_OBJ_NULL means there is no self to
        // pass; otherwise self is prepended as the first argument
        int adjust = (sp[1] == MP_OBJ_NULL) ? 0 : 1;
        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(*sp, n_args + adjust, n_kw, sp + 2 - adjust);
        if (new_state) {
            new_state->prev = code_state;
            code_state = new_state;
            nlr_pop();
            goto run_code_state;
        }
        #if MICROPY_STACKLESS_STRICT
        else {
            goto deep_recursion_error;
        }
        #endif
    }
    #endif
    SET_TOP(mp_call_method_n_kw(unum & 0xff, (unum >> 8) & 0xff, sp));
    DISPATCH();
}
ENTRY(MP_BC_CALL_METHOD_VAR_KW): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_UINT;
    // unum & 0xff == n_positional
    // (unum >> 8) & 0xff == n_keyword
    // We have following stack layout here:
    // fun self arg0 arg1 ... kw0 val0 kw1 val1 ... seq dict <- TOS
    // the extra +3 accounts for the self slot plus the *seq and **dict objects
    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 3;
    #if MICROPY_STACKLESS
    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
        // Stackless path, as in CALL_FUNCTION_VAR_KW but with self handling
        // (note the `true` passed to the args-preparation helper).
        code_state->ip = ip;
        code_state->sp = sp;
        code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, currently_in_except_block);
        mp_call_args_t out_args;
        mp_call_prepare_args_n_kw_var(true, unum, sp, &out_args);
        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(out_args.fun,
            out_args.n_args, out_args.n_kw, out_args.args);
        // the flattened arg array was copied into the code state; free it
        m_del(mp_obj_t, out_args.args, out_args.n_alloc);
        if (new_state) {
            new_state->prev = code_state;
            code_state = new_state;
            nlr_pop();
            goto run_code_state;
        }
        #if MICROPY_STACKLESS_STRICT
        else {
            goto deep_recursion_error;
        }
        #endif
    }
    #endif
    SET_TOP(mp_call_method_n_kw_var(true, unum, sp));
    DISPATCH();
}
ENTRY(MP_BC_RETURN_VALUE):
    MARK_EXC_IP_SELECTIVE();
    // These next 3 lines pop a try-finally exception handler, if one
    // is there on the exception stack. Without this the finally block
    // is executed a second time when the return is executed, because
    // the try-finally exception handler is still on the stack.
    // TODO Possibly find a better way to handle this case.
    if (currently_in_except_block) {
        POP_EXC_BLOCK();
    }
unwind_return:
    // Walk the exception stack looking for entries flagged (via TAG1 on
    // val_sp) as finally handlers that must run before this frame returns.
    while (exc_sp >= exc_stack) {
        if (MP_TAGPTR_TAG1(exc_sp->val_sp)) {
            // Getting here the stack looks like:
            // (..., X, [iter0, iter1, ...,] ret_val)
            // where X is pointed to by exc_sp->val_sp and in the case
            // of a "with" block contains the context manager info.
            // There may be 0 or more for-iterators between X and the
            // return value, and these must be removed before control can
            // pass to the finally code. We simply copy the ret_value down
            // over these iterators, if they exist. If they don't then the
            // following is a null operation.
            mp_obj_t *finally_sp = MP_TAGPTR_PTR(exc_sp->val_sp);
            finally_sp[1] = sp[0];
            sp = &finally_sp[1];
            // We're going to run "finally" code as a coroutine
            // (not calling it recursively). Set up a sentinel
            // on a stack so it can return back to us when it is
            // done (when WITH_CLEANUP or END_FINALLY reached).
            PUSH(MP_OBJ_NEW_SMALL_INT(UNWIND_RETURN));
            ip = exc_sp->handler;
            exc_sp--;
            goto dispatch_loop;
        }
        exc_sp--;
    }
    // no finally blocks left: this frame actually returns now
    nlr_pop();
    code_state->sp = sp;
    assert(exc_sp == exc_stack - 1); // exception stack must be fully unwound
    MICROPY_VM_HOOK_RETURN
    #if MICROPY_STACKLESS
    // if this frame was entered via a stackless call, resume the caller
    // frame in this dispatch loop instead of returning through the C stack
    if (code_state->prev != NULL) {
        mp_obj_t res = *sp;
        mp_globals_set(code_state->old_globals);
        code_state = code_state->prev;
        *code_state->sp = res; // deliver the return value to the caller's stack
        goto run_code_state;
    }
    #endif
    return MP_VM_RETURN_NORMAL;
ENTRY(MP_BC_RAISE_VARARGS): {
    MARK_EXC_IP_SELECTIVE();
    // unum: 0 = bare `raise` (re-raise), 1 = `raise X`, 2 = `raise X from Y`
    mp_uint_t unum = *ip++;
    mp_obj_t obj;
    if (unum == 2) {
        mp_warning("exception chaining not supported");
        // ignore (pop) "from" argument
        sp--;
    }
    if (unum == 0) {
        // search for the inner-most previous exception, to reraise it
        obj = MP_OBJ_NULL;
        for (mp_exc_stack_t *e = exc_sp; e >= exc_stack; e--) {
            if (e->prev_exc != NULL) {
                obj = MP_OBJ_FROM_PTR(e->prev_exc);
                break;
            }
        }
        if (obj == MP_OBJ_NULL) {
            // bare raise with no active exception is an error
            obj = mp_obj_new_exception_msg(&mp_type_RuntimeError, "no active exception to reraise");
            RAISE(obj);
        }
    } else {
        obj = POP();
    }
    // normalize the operand into a raisable exception instance, then raise
    obj = mp_make_raise_obj(obj);
    RAISE(obj);
}
ENTRY(MP_BC_YIELD_VALUE):
yield:
    // Suspend this frame: save the VM registers into code_state and hand
    // control back to whoever invoked this bytecode (generator machinery).
    nlr_pop();
    code_state->ip = ip;
    code_state->sp = sp;
    code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, currently_in_except_block);
    return MP_VM_RETURN_YIELD;
ENTRY(MP_BC_YIELD_FROM): {
    MARK_EXC_IP_SELECTIVE();
    //#define EXC_MATCH(exc, type) MP_OBJ_IS_TYPE(exc, type)
    #define EXC_MATCH(exc, type) mp_obj_exception_match(exc, type)
    #define GENERATOR_EXIT_IF_NEEDED(t) if (t != MP_OBJ_NULL && EXC_MATCH(t, MP_OBJ_FROM_PTR(&mp_type_GeneratorExit))) { RAISE(t); }
    mp_vm_return_kind_t ret_kind;
    mp_obj_t send_value = POP();
    mp_obj_t t_exc = MP_OBJ_NULL;
    mp_obj_t ret_value;
    if (inject_exc != MP_OBJ_NULL) {
        // an exception was injected from outside; deliver it to the
        // sub-generator (TOP()) instead of the send value
        t_exc = inject_exc;
        inject_exc = MP_OBJ_NULL;
        ret_kind = mp_resume(TOP(), MP_OBJ_NULL, t_exc, &ret_value);
    } else {
        ret_kind = mp_resume(TOP(), send_value, MP_OBJ_NULL, &ret_value);
    }
    if (ret_kind == MP_VM_RETURN_YIELD) {
        // sub-generator yielded: back up ip so this YIELD_FROM re-executes
        // when we are resumed, and pass the yielded value up to our caller
        ip--;
        PUSH(ret_value);
        goto yield;
    } else if (ret_kind == MP_VM_RETURN_NORMAL) {
        // Pop exhausted gen
        sp--;
        // TODO: When ret_value can be MP_OBJ_NULL here??
        if (ret_value == MP_OBJ_NULL || ret_value == MP_OBJ_STOP_ITERATION) {
            // Optimize StopIteration
            // TODO: get StopIteration's value
            PUSH(mp_const_none);
        } else {
            PUSH(ret_value);
        }
        // If we injected GeneratorExit downstream, then even
        // if it was swallowed, we re-raise GeneratorExit
        GENERATOR_EXIT_IF_NEEDED(t_exc);
        DISPATCH();
    } else {
        assert(ret_kind == MP_VM_RETURN_EXCEPTION);
        // Pop exhausted gen
        sp--;
        if (EXC_MATCH(ret_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
            // StopIteration from the sub-generator: its value becomes the
            // result of the whole yield-from expression
            PUSH(mp_obj_exception_get_value(ret_value));
            // If we injected GeneratorExit downstream, then even
            // if it was swallowed, we re-raise GeneratorExit
            GENERATOR_EXIT_IF_NEEDED(t_exc);
            DISPATCH();
        } else {
            RAISE(ret_value);
        }
    }
}
ENTRY(MP_BC_IMPORT_NAME): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    // qst is the module name; the popped TOS and the slot beneath it are
    // the remaining two arguments to mp_import_name (presumably the
    // fromlist and the relative-import level — confirm against the compiler)
    mp_obj_t obj = POP();
    SET_TOP(mp_import_name(qst, obj, TOP()));
    DISPATCH();
}
ENTRY(MP_BC_IMPORT_FROM): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    // keep the module on the stack and push the name imported from it
    mp_obj_t obj = mp_import_from(TOP(), qst);
    PUSH(obj);
    DISPATCH();
}
ENTRY(MP_BC_IMPORT_STAR):
    MARK_EXC_IP_SELECTIVE();
    // pop the module and perform `from module import *`
    mp_import_all(POP());
    DISPATCH();
#if MICROPY_OPT_COMPUTED_GOTO
// With computed goto, each "multi" opcode family (a contiguous range of
// opcode values that encode their operand in the opcode byte itself) gets
// its own entry point; ip[-1] is the opcode byte just consumed.
ENTRY(MP_BC_LOAD_CONST_SMALL_INT_MULTI):
    // the opcode encodes a small int; the -16 bias gives a range of
    // [-16, 47] (64 values — see the range check in the non-goto path)
    PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - 16));
    DISPATCH();
ENTRY(MP_BC_LOAD_FAST_MULTI):
    // fastn is indexed with non-positive offsets, hence the reversed subtraction
    obj_shared = fastn[MP_BC_LOAD_FAST_MULTI - (mp_int_t)ip[-1]];
    goto load_check;
ENTRY(MP_BC_STORE_FAST_MULTI):
    fastn[MP_BC_STORE_FAST_MULTI - (mp_int_t)ip[-1]] = POP();
    DISPATCH();
ENTRY(MP_BC_UNARY_OP_MULTI):
    MARK_EXC_IP_SELECTIVE();
    // the unary op number is encoded as the offset from the base opcode
    SET_TOP(mp_unary_op(ip[-1] - MP_BC_UNARY_OP_MULTI, TOP()));
    DISPATCH();
ENTRY(MP_BC_BINARY_OP_MULTI): {
    MARK_EXC_IP_SELECTIVE();
    mp_obj_t rhs = POP();
    mp_obj_t lhs = TOP();
    SET_TOP(mp_binary_op(ip[-1] - MP_BC_BINARY_OP_MULTI, lhs, rhs));
    DISPATCH();
}
ENTRY_DEFAULT:
    MARK_EXC_IP_SELECTIVE();
#else
ENTRY_DEFAULT:
    // Without computed goto all the multi opcodes land in the switch
    // default and are decoded here by range checks on the opcode byte.
    if (ip[-1] < MP_BC_LOAD_CONST_SMALL_INT_MULTI + 64) {
        PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - 16));
        DISPATCH();
    } else if (ip[-1] < MP_BC_LOAD_FAST_MULTI + 16) {
        obj_shared = fastn[MP_BC_LOAD_FAST_MULTI - (mp_int_t)ip[-1]];
        goto load_check;
    } else if (ip[-1] < MP_BC_STORE_FAST_MULTI + 16) {
        fastn[MP_BC_STORE_FAST_MULTI - (mp_int_t)ip[-1]] = POP();
        DISPATCH();
    } else if (ip[-1] < MP_BC_UNARY_OP_MULTI + 7) {
        SET_TOP(mp_unary_op(ip[-1] - MP_BC_UNARY_OP_MULTI, TOP()));
        DISPATCH();
    } else if (ip[-1] < MP_BC_BINARY_OP_MULTI + 36) {
        mp_obj_t rhs = POP();
        mp_obj_t lhs = TOP();
        SET_TOP(mp_binary_op(ip[-1] - MP_BC_BINARY_OP_MULTI, lhs, rhs));
        DISPATCH();
    } else
#endif
    {
        // genuinely unknown opcode: leave the VM with a NotImplementedError
        // (stashed in fastn[0] because sp is not meaningful here)
        mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NotImplementedError, "byte code not implemented");
        nlr_pop();
        fastn[0] = obj;
        return MP_VM_RETURN_EXCEPTION;
    }
#if !MICROPY_OPT_COMPUTED_GOTO
} // switch
#endif
pending_exception_check:
    // Executed between bytecodes: service any pending exception (e.g. an
    // asynchronously injected KeyboardInterrupt) and optionally yield the GIL.
    MICROPY_VM_HOOK_LOOP
    #if MICROPY_ENABLE_SCHEDULER
    // This is an inlined variant of mp_handle_pending
    if (MP_STATE_VM(sched_state) == MP_SCHED_PENDING) {
        MARK_EXC_IP_SELECTIVE();
        mp_uint_t atomic_state = MICROPY_BEGIN_ATOMIC_SECTION();
        mp_obj_t obj = MP_STATE_VM(mp_pending_exception);
        if (obj != MP_OBJ_NULL) {
            // clear the pending exception before raising so it cannot be
            // delivered twice
            MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL;
            if (!mp_sched_num_pending()) {
                MP_STATE_VM(sched_state) = MP_SCHED_IDLE;
            }
            MICROPY_END_ATOMIC_SECTION(atomic_state);
            RAISE(obj);
        }
        // no exception pending: run queued scheduler callbacks (this tail
        // presumably also closes the atomic section — see mp_handle_pending_tail)
        mp_handle_pending_tail(atomic_state);
    }
    #else
    // This is an inlined variant of mp_handle_pending
    if (MP_STATE_VM(mp_pending_exception) != MP_OBJ_NULL) {
        MARK_EXC_IP_SELECTIVE();
        mp_obj_t obj = MP_STATE_VM(mp_pending_exception);
        MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL;
        RAISE(obj);
    }
    #endif
    #if MICROPY_PY_THREAD_GIL
    #if MICROPY_PY_THREAD_GIL_VM_DIVISOR
    // only offer to release the GIL every MICROPY_PY_THREAD_GIL_VM_DIVISOR opcodes
    if (--gil_divisor == 0) {
        gil_divisor = MICROPY_PY_THREAD_GIL_VM_DIVISOR;
    #else
    {
    #endif
        #if MICROPY_ENABLE_SCHEDULER
        // can only switch threads if the scheduler is unlocked
        if (MP_STATE_VM(sched_state) == MP_SCHED_IDLE)
        #endif
        {
            // briefly release and re-acquire the GIL so other threads can run
            MP_THREAD_GIL_EXIT();
            MP_THREAD_GIL_ENTER();
        }
    }
    #endif
    } // for loop
} else {
exception_handler:
    // exception occurred
    #if MICROPY_PY_SYS_EXC_INFO
    // record the active exception for sys.exc_info()
    MP_STATE_VM(cur_exception) = nlr.ret_val;
    #endif
    #if SELECTIVE_EXC_IP
    // with selective ip, we store the ip 1 byte past the opcode, so move ptr back
    code_state->ip -= 1;
    #endif
    // Fast paths for StopIteration: finish a for-loop or a yield-from
    // directly instead of unwinding through the exception machinery.
    if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type), MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
        if (code_state->ip) {
            // check if it's a StopIteration within a for block
            if (*code_state->ip == MP_BC_FOR_ITER) {
                const byte *ip = code_state->ip + 1;
                DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward
                code_state->ip = ip + ulab; // jump to after for-block
                code_state->sp -= MP_OBJ_ITER_BUF_NSLOTS; // pop the exhausted iterator
                goto outer_dispatch_loop; // continue with dispatch loop
            } else if (*code_state->ip == MP_BC_YIELD_FROM) {
                // StopIteration inside yield from call means return a value of
                // yield from, so inject exception's value as yield from's result
                *++code_state->sp = mp_obj_exception_get_value(MP_OBJ_FROM_PTR(nlr.ret_val));
                code_state->ip++; // yield from is over, move to next instruction
                goto outer_dispatch_loop; // continue with dispatch loop
            }
        }
    }
    #if MICROPY_STACKLESS
unwind_loop:
    #endif
    // set file and line number that the exception occurred at
    // TODO: don't set traceback for exceptions re-raised by END_FINALLY.
    // But consider how to handle nested exceptions.
    // TODO need a better way of not adding traceback to constant objects (right now, just GeneratorExit_obj and MemoryError_obj)
    if (nlr.ret_val != &mp_const_GeneratorExit_obj && nlr.ret_val != &mp_const_MemoryError_obj) {
        // Walk the bytecode prelude to reach the line-number table, then
        // decode the table to map the current bytecode offset to a source line.
        const byte *ip = code_state->fun_bc->bytecode;
        ip = mp_decode_uint_skip(ip); // skip n_state
        ip = mp_decode_uint_skip(ip); // skip n_exc_stack
        ip++; // skip scope_params
        ip++; // skip n_pos_args
        ip++; // skip n_kwonly_args
        ip++; // skip n_def_pos_args
        size_t bc = code_state->ip - ip; // offset of the faulting opcode
        size_t code_info_size = mp_decode_uint_value(ip);
        ip = mp_decode_uint_skip(ip); // skip code_info_size
        bc -= code_info_size;
        #if MICROPY_PERSISTENT_CODE
        // qstrs are stored as fixed 16-bit little-endian values
        qstr block_name = ip[0] | (ip[1] << 8);
        qstr source_file = ip[2] | (ip[3] << 8);
        ip += 4;
        #else
        qstr block_name = mp_decode_uint_value(ip);
        ip = mp_decode_uint_skip(ip);
        qstr source_file = mp_decode_uint_value(ip);
        ip = mp_decode_uint_skip(ip);
        #endif
        size_t source_line = 1;
        size_t c;
        // the table is a sequence of (bytecode-delta b, line-delta l) pairs
        // in one of two variable-length encodings, terminated by a 0 byte
        while ((c = *ip)) {
            size_t b, l;
            if ((c & 0x80) == 0) {
                // 0b0LLBBBBB encoding
                b = c & 0x1f;
                l = c >> 5;
                ip += 1;
            } else {
                // 0b1LLLBBBB 0bLLLLLLLL encoding (l's LSB in second byte)
                b = c & 0xf;
                l = ((c << 4) & 0x700) | ip[1];
                ip += 2;
            }
            if (bc >= b) {
                bc -= b;
                source_line += l;
            } else {
                // found source line corresponding to bytecode offset
                break;
            }
        }
        mp_obj_exception_add_traceback(MP_OBJ_FROM_PTR(nlr.ret_val), source_file, source_line, block_name);
    }
    while (currently_in_except_block) {
        // nested exception
        assert(exc_sp >= exc_stack);
        // TODO make a proper message for nested exception
        // at the moment we are just raising the very last exception (the one that caused the nested exception)
        // move up to previous exception handler
        POP_EXC_BLOCK();
    }
    if (exc_sp >= exc_stack) {
        // set flag to indicate that we are now handling an exception
        currently_in_except_block = 1;
        // catch exception and pass to byte code
        code_state->ip = exc_sp->handler;
        mp_obj_t *sp = MP_TAGPTR_PTR(exc_sp->val_sp);
        // save this exception in the stack so it can be used in a reraise, if needed
        exc_sp->prev_exc = nlr.ret_val;
        // push exception object so it can be handled by bytecode
        PUSH(MP_OBJ_FROM_PTR(nlr.ret_val));
        code_state->sp = sp;
    #if MICROPY_STACKLESS
    } else if (code_state->prev != NULL) {
        // no handler in this frame but there is a stackless caller frame:
        // restore its VM registers and continue unwinding there
        mp_globals_set(code_state->old_globals);
        code_state = code_state->prev;
        size_t n_state = mp_decode_uint_value(code_state->fun_bc->bytecode);
        fastn = &code_state->state[n_state - 1];
        exc_stack = (mp_exc_stack_t*)(code_state->state + n_state);
        // variables that are visible to the exception handler (declared volatile)
        currently_in_except_block = MP_TAGPTR_TAG0(code_state->exc_sp); // 0 or 1, to detect nested exceptions
        exc_sp = MP_TAGPTR_PTR(code_state->exc_sp); // stack grows up, exc_sp points to top of stack
        goto unwind_loop;
    #endif
    } else {
        // propagate exception to higher level
        // TODO what to do about ip and sp? they don't really make sense at this point
        fastn[0] = MP_OBJ_FROM_PTR(nlr.ret_val); // must put exception here because sp is invalid
        return MP_VM_RETURN_EXCEPTION;
    }
}
}
}