  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013-2019 Damien P. George
  7. * Copyright (c) 2014-2015 Paul Sokolovsky
  8. *
  9. * Permission is hereby granted, free of charge, to any person obtaining a copy
  10. * of this software and associated documentation files (the "Software"), to deal
  11. * in the Software without restriction, including without limitation the rights
  12. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. * copies of the Software, and to permit persons to whom the Software is
  14. * furnished to do so, subject to the following conditions:
  15. *
  16. * The above copyright notice and this permission notice shall be included in
  17. * all copies or substantial portions of the Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  22. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  23. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  24. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  25. * THE SOFTWARE.
  26. */
  27. #include <stdio.h>
  28. #include <string.h>
  29. #include <assert.h>
  30. #include "py/emitglue.h"
  31. #include "py/objtype.h"
  32. #include "py/runtime.h"
  33. #include "py/bc0.h"
  34. #include "py/bc.h"
  35. #include "py/profile.h"
  36. #if 0
  37. #define TRACE(ip) printf("sp=%d ", (int)(sp - &code_state->state[0] + 1)); mp_bytecode_print2(ip, 1, code_state->fun_bc->const_table);
  38. #else
  39. #define TRACE(ip)
  40. #endif
  41. // Value stack grows up (this makes it incompatible with native C stack, but
  42. // makes sure that arguments to functions are in natural order arg1..argN
  43. // (Python semantics mandates left-to-right evaluation order, including for
  44. // function arguments). Stack pointer is pre-incremented and points at the
  45. // top element.
  46. // Exception stack also grows up, top element is also pointed at.
  47. #define DECODE_UINT \
  48. mp_uint_t unum = 0; \
  49. do { \
  50. unum = (unum << 7) + (*ip & 0x7f); \
  51. } while ((*ip++ & 0x80) != 0)
  52. #define DECODE_ULABEL size_t ulab = (ip[0] | (ip[1] << 8)); ip += 2
  53. #define DECODE_SLABEL size_t slab = (ip[0] | (ip[1] << 8)) - 0x8000; ip += 2
  54. #if MICROPY_PERSISTENT_CODE
  55. #define DECODE_QSTR \
  56. qstr qst = ip[0] | ip[1] << 8; \
  57. ip += 2;
  58. #define DECODE_PTR \
  59. DECODE_UINT; \
  60. void *ptr = (void*)(uintptr_t)code_state->fun_bc->const_table[unum]
  61. #define DECODE_OBJ \
  62. DECODE_UINT; \
  63. mp_obj_t obj = (mp_obj_t)code_state->fun_bc->const_table[unum]
  64. #else
  65. #define DECODE_QSTR qstr qst = 0; \
  66. do { \
  67. qst = (qst << 7) + (*ip & 0x7f); \
  68. } while ((*ip++ & 0x80) != 0)
  69. #define DECODE_PTR \
  70. ip = (byte*)MP_ALIGN(ip, sizeof(void*)); \
  71. void *ptr = *(void**)ip; \
  72. ip += sizeof(void*)
  73. #define DECODE_OBJ \
  74. ip = (byte*)MP_ALIGN(ip, sizeof(mp_obj_t)); \
  75. mp_obj_t obj = *(mp_obj_t*)ip; \
  76. ip += sizeof(mp_obj_t)
  77. #endif
  78. #define PUSH(val) *++sp = (val)
  79. #define POP() (*sp--)
  80. #define TOP() (*sp)
  81. #define SET_TOP(val) *sp = (val)
  82. #if MICROPY_PY_SYS_EXC_INFO
  83. #define CLEAR_SYS_EXC_INFO() MP_STATE_VM(cur_exception) = NULL;
  84. #else
  85. #define CLEAR_SYS_EXC_INFO()
  86. #endif
  87. #define PUSH_EXC_BLOCK(with_or_finally) do { \
  88. DECODE_ULABEL; /* except labels are always forward */ \
  89. ++exc_sp; \
  90. exc_sp->handler = ip + ulab; \
  91. exc_sp->val_sp = MP_TAGPTR_MAKE(sp, ((with_or_finally) << 1)); \
  92. exc_sp->prev_exc = NULL; \
  93. } while (0)
  94. #define POP_EXC_BLOCK() \
  95. exc_sp--; /* pop back to previous exception handler */ \
  96. CLEAR_SYS_EXC_INFO() /* just clear sys.exc_info(), not compliant, but it shouldn't be used in 1st place */
  97. #define CANCEL_ACTIVE_FINALLY(sp) do { \
  98. if (mp_obj_is_small_int(sp[-1])) { \
  99. /* Stack: (..., prev_dest_ip, prev_cause, dest_ip) */ \
  100. /* Cancel the unwind through the previous finally, replace with current one */ \
  101. sp[-2] = sp[0]; \
  102. sp -= 2; \
  103. } else { \
  104. assert(sp[-1] == mp_const_none || mp_obj_is_exception_instance(sp[-1])); \
  105. /* Stack: (..., None/exception, dest_ip) */ \
  106. /* Silence the finally's exception value (may be None or an exception) */ \
  107. sp[-1] = sp[0]; \
  108. --sp; \
  109. } \
  110. } while (0)
  111. #if MICROPY_PY_SYS_SETTRACE
  112. #define FRAME_SETUP() do { \
  113. assert(code_state != code_state->prev_state); \
  114. MP_STATE_THREAD(current_code_state) = code_state; \
  115. assert(code_state != code_state->prev_state); \
  116. } while(0)
  117. #define FRAME_ENTER() do { \
  118. assert(code_state != code_state->prev_state); \
  119. code_state->prev_state = MP_STATE_THREAD(current_code_state); \
  120. assert(code_state != code_state->prev_state); \
  121. if (!mp_prof_is_executing) { \
  122. mp_prof_frame_enter(code_state); \
  123. } \
  124. } while(0)
  125. #define FRAME_LEAVE() do { \
  126. assert(code_state != code_state->prev_state); \
  127. MP_STATE_THREAD(current_code_state) = code_state->prev_state; \
  128. assert(code_state != code_state->prev_state); \
  129. } while(0)
  130. #define FRAME_UPDATE() do { \
  131. assert(MP_STATE_THREAD(current_code_state) == code_state); \
  132. if (!mp_prof_is_executing) { \
  133. code_state->frame = MP_OBJ_TO_PTR(mp_prof_frame_update(code_state)); \
  134. } \
  135. } while(0)
  136. #define TRACE_TICK(current_ip, current_sp, is_exception) do { \
  137. assert(code_state != code_state->prev_state); \
  138. assert(MP_STATE_THREAD(current_code_state) == code_state); \
  139. if (!mp_prof_is_executing && code_state->frame && MP_STATE_THREAD(prof_trace_callback)) { \
  140. MP_PROF_INSTR_DEBUG_PRINT(code_state->ip); \
  141. } \
  142. if (!mp_prof_is_executing && code_state->frame && code_state->frame->callback) { \
  143. mp_prof_instr_tick(code_state, is_exception); \
  144. } \
  145. } while(0)
  146. #else // MICROPY_PY_SYS_SETTRACE
  147. #define FRAME_SETUP()
  148. #define FRAME_ENTER()
  149. #define FRAME_LEAVE()
  150. #define FRAME_UPDATE()
  151. #define TRACE_TICK(current_ip, current_sp, is_exception)
  152. #endif // MICROPY_PY_SYS_SETTRACE
  153. #if MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  154. static inline mp_map_elem_t *mp_map_cached_lookup(mp_map_t *map, qstr qst, uint8_t *idx_cache) {
  155. size_t idx = *idx_cache;
  156. mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
  157. mp_map_elem_t *elem = NULL;
  158. if (idx < map->alloc && map->table[idx].key == key) {
  159. elem = &map->table[idx];
  160. } else {
  161. elem = mp_map_lookup(map, key, MP_MAP_LOOKUP);
  162. if (elem != NULL) {
  163. *idx_cache = (elem - &map->table[0]) & 0xff;
  164. }
  165. }
  166. return elem;
  167. }
  168. #endif
  169. // fastn has items in reverse order (fastn[0] is local[0], fastn[-1] is local[1], etc)
  170. // sp points to bottom of stack which grows up
  171. // returns:
  172. // MP_VM_RETURN_NORMAL, sp valid, return value in *sp
  173. // MP_VM_RETURN_YIELD, ip, sp valid, yielded value in *sp
  174. // MP_VM_RETURN_EXCEPTION, exception in state[0]
  175. mp_vm_return_kind_t mp_execute_bytecode(mp_code_state_t *code_state, volatile mp_obj_t inject_exc) {
  176. #define SELECTIVE_EXC_IP (0)
  177. #if SELECTIVE_EXC_IP
  178. #define MARK_EXC_IP_SELECTIVE() { code_state->ip = ip; } /* stores ip 1 byte past last opcode */
  179. #define MARK_EXC_IP_GLOBAL()
  180. #else
  181. #define MARK_EXC_IP_SELECTIVE()
  182. #define MARK_EXC_IP_GLOBAL() { code_state->ip = ip; } /* stores ip pointing to last opcode */
  183. #endif
  184. #if MICROPY_OPT_COMPUTED_GOTO
  185. #include "py/vmentrytable.h"
  186. #define DISPATCH() do { \
  187. TRACE(ip); \
  188. MARK_EXC_IP_GLOBAL(); \
  189. TRACE_TICK(ip, sp, false); \
  190. goto *entry_table[*ip++]; \
  191. } while (0)
  192. #define DISPATCH_WITH_PEND_EXC_CHECK() goto pending_exception_check
  193. #define ENTRY(op) entry_##op
  194. #define ENTRY_DEFAULT entry_default
  195. #else
  196. #define DISPATCH() goto dispatch_loop
  197. #define DISPATCH_WITH_PEND_EXC_CHECK() goto pending_exception_check
  198. #define ENTRY(op) case op
  199. #define ENTRY_DEFAULT default
  200. #endif
  201. // nlr_raise needs to be implemented as a goto, so that the C compiler's flow analyser
  202. // sees that it's possible for us to jump from the dispatch loop to the exception
  203. // handler. Without this, the code may have a different stack layout in the dispatch
  204. // loop and the exception handler, leading to very obscure bugs.
  205. #define RAISE(o) do { nlr_pop(); nlr.ret_val = MP_OBJ_TO_PTR(o); goto exception_handler; } while (0)
  206. #if MICROPY_STACKLESS
  207. run_code_state: ;
  208. #endif
  209. FRAME_ENTER();
  210. #if MICROPY_STACKLESS
  211. run_code_state_from_return: ;
  212. #endif
  213. FRAME_SETUP();
  214. // Pointers which are constant for particular invocation of mp_execute_bytecode()
  215. mp_obj_t * /*const*/ fastn;
  216. mp_exc_stack_t * /*const*/ exc_stack;
  217. {
  218. size_t n_state = code_state->n_state;
  219. fastn = &code_state->state[n_state - 1];
  220. exc_stack = (mp_exc_stack_t*)(code_state->state + n_state);
  221. }
  222. // variables that are visible to the exception handler (declared volatile)
  223. mp_exc_stack_t *volatile exc_sp = MP_CODE_STATE_EXC_SP_IDX_TO_PTR(exc_stack, code_state->exc_sp_idx); // stack grows up, exc_sp points to top of stack
  224. #if MICROPY_PY_THREAD_GIL && MICROPY_PY_THREAD_GIL_VM_DIVISOR
  225. // This needs to be volatile and outside the VM loop so it persists across handling
  226. // of any exceptions. Otherwise it's possible that the VM never gives up the GIL.
  227. volatile int gil_divisor = MICROPY_PY_THREAD_GIL_VM_DIVISOR;
  228. #endif
  229. // outer exception handling loop
  230. for (;;) {
  231. nlr_buf_t nlr;
  232. outer_dispatch_loop:
  233. if (nlr_push(&nlr) == 0) {
  234. // local variables that are not visible to the exception handler
  235. const byte *ip = code_state->ip;
  236. mp_obj_t *sp = code_state->sp;
  237. mp_obj_t obj_shared;
  238. MICROPY_VM_HOOK_INIT
  239. // If we have exception to inject, now that we finish setting up
  240. // execution context, raise it. This works as if MP_BC_RAISE_OBJ
  241. // bytecode was executed.
  242. // Injecting exc into yield from generator is a special case,
  243. // handled by MP_BC_YIELD_FROM itself
  244. if (inject_exc != MP_OBJ_NULL && *ip != MP_BC_YIELD_FROM) {
  245. mp_obj_t exc = inject_exc;
  246. inject_exc = MP_OBJ_NULL;
  247. exc = mp_make_raise_obj(exc);
  248. RAISE(exc);
  249. }
  250. // loop to execute byte code
  251. for (;;) {
  252. dispatch_loop:
  253. #if MICROPY_OPT_COMPUTED_GOTO
  254. DISPATCH();
  255. #else
  256. TRACE(ip);
  257. MARK_EXC_IP_GLOBAL();
  258. TRACE_TICK(ip, sp, false);
  259. switch (*ip++) {
  260. #endif
  261. ENTRY(MP_BC_LOAD_CONST_FALSE):
  262. PUSH(mp_const_false);
  263. DISPATCH();
  264. ENTRY(MP_BC_LOAD_CONST_NONE):
  265. PUSH(mp_const_none);
  266. DISPATCH();
  267. ENTRY(MP_BC_LOAD_CONST_TRUE):
  268. PUSH(mp_const_true);
  269. DISPATCH();
  270. ENTRY(MP_BC_LOAD_CONST_SMALL_INT): {
  271. mp_int_t num = 0;
  272. if ((ip[0] & 0x40) != 0) {
  273. // Number is negative
  274. num--;
  275. }
  276. do {
  277. num = (num << 7) | (*ip & 0x7f);
  278. } while ((*ip++ & 0x80) != 0);
  279. PUSH(MP_OBJ_NEW_SMALL_INT(num));
  280. DISPATCH();
  281. }
  282. ENTRY(MP_BC_LOAD_CONST_STRING): {
  283. DECODE_QSTR;
  284. PUSH(MP_OBJ_NEW_QSTR(qst));
  285. DISPATCH();
  286. }
  287. ENTRY(MP_BC_LOAD_CONST_OBJ): {
  288. DECODE_OBJ;
  289. PUSH(obj);
  290. DISPATCH();
  291. }
  292. ENTRY(MP_BC_LOAD_NULL):
  293. PUSH(MP_OBJ_NULL);
  294. DISPATCH();
  295. ENTRY(MP_BC_LOAD_FAST_N): {
  296. DECODE_UINT;
  297. obj_shared = fastn[-unum];
  298. load_check:
  299. if (obj_shared == MP_OBJ_NULL) {
  300. local_name_error: {
  301. MARK_EXC_IP_SELECTIVE();
  302. mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NameError, "local variable referenced before assignment");
  303. RAISE(obj);
  304. }
  305. }
  306. PUSH(obj_shared);
  307. DISPATCH();
  308. }
  309. ENTRY(MP_BC_LOAD_DEREF): {
  310. DECODE_UINT;
  311. obj_shared = mp_obj_cell_get(fastn[-unum]);
  312. goto load_check;
  313. }
  314. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  315. ENTRY(MP_BC_LOAD_NAME): {
  316. MARK_EXC_IP_SELECTIVE();
  317. DECODE_QSTR;
  318. PUSH(mp_load_name(qst));
  319. DISPATCH();
  320. }
  321. #else
  322. ENTRY(MP_BC_LOAD_NAME): {
  323. MARK_EXC_IP_SELECTIVE();
  324. DECODE_QSTR;
  325. mp_map_elem_t *elem = mp_map_cached_lookup(&mp_locals_get()->map, qst, (uint8_t*)ip);
  326. mp_obj_t obj;
  327. if (elem != NULL) {
  328. obj = elem->value;
  329. } else {
  330. obj = mp_load_name(qst);
  331. }
  332. PUSH(obj);
  333. ip++;
  334. DISPATCH();
  335. }
  336. #endif
  337. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  338. ENTRY(MP_BC_LOAD_GLOBAL): {
  339. MARK_EXC_IP_SELECTIVE();
  340. DECODE_QSTR;
  341. PUSH(mp_load_global(qst));
  342. DISPATCH();
  343. }
  344. #else
  345. ENTRY(MP_BC_LOAD_GLOBAL): {
  346. MARK_EXC_IP_SELECTIVE();
  347. DECODE_QSTR;
  348. mp_map_elem_t *elem = mp_map_cached_lookup(&mp_globals_get()->map, qst, (uint8_t*)ip);
  349. mp_obj_t obj;
  350. if (elem != NULL) {
  351. obj = elem->value;
  352. } else {
  353. obj = mp_load_global(qst);
  354. }
  355. PUSH(obj);
  356. ip++;
  357. DISPATCH();
  358. }
  359. #endif
  360. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  361. ENTRY(MP_BC_LOAD_ATTR): {
  362. FRAME_UPDATE();
  363. MARK_EXC_IP_SELECTIVE();
  364. DECODE_QSTR;
  365. SET_TOP(mp_load_attr(TOP(), qst));
  366. DISPATCH();
  367. }
  368. #else
  369. ENTRY(MP_BC_LOAD_ATTR): {
  370. FRAME_UPDATE();
  371. MARK_EXC_IP_SELECTIVE();
  372. DECODE_QSTR;
  373. mp_obj_t top = TOP();
  374. mp_map_elem_t *elem = NULL;
  375. if (mp_obj_is_instance_type(mp_obj_get_type(top))) {
  376. mp_obj_instance_t *self = MP_OBJ_TO_PTR(top);
  377. elem = mp_map_cached_lookup(&self->members, qst, (uint8_t*)ip);
  378. }
  379. mp_obj_t obj;
  380. if (elem != NULL) {
  381. obj = elem->value;
  382. } else {
  383. obj = mp_load_attr(top, qst);
  384. }
  385. SET_TOP(obj);
  386. ip++;
  387. DISPATCH();
  388. }
  389. #endif
  390. ENTRY(MP_BC_LOAD_METHOD): {
  391. MARK_EXC_IP_SELECTIVE();
  392. DECODE_QSTR;
  393. mp_load_method(*sp, qst, sp);
  394. sp += 1;
  395. DISPATCH();
  396. }
  397. ENTRY(MP_BC_LOAD_SUPER_METHOD): {
  398. MARK_EXC_IP_SELECTIVE();
  399. DECODE_QSTR;
  400. sp -= 1;
  401. mp_load_super_method(qst, sp - 1);
  402. DISPATCH();
  403. }
  404. ENTRY(MP_BC_LOAD_BUILD_CLASS):
  405. MARK_EXC_IP_SELECTIVE();
  406. PUSH(mp_load_build_class());
  407. DISPATCH();
  408. ENTRY(MP_BC_LOAD_SUBSCR): {
  409. MARK_EXC_IP_SELECTIVE();
  410. mp_obj_t index = POP();
  411. SET_TOP(mp_obj_subscr(TOP(), index, MP_OBJ_SENTINEL));
  412. DISPATCH();
  413. }
  414. ENTRY(MP_BC_STORE_FAST_N): {
  415. DECODE_UINT;
  416. fastn[-unum] = POP();
  417. DISPATCH();
  418. }
  419. ENTRY(MP_BC_STORE_DEREF): {
  420. DECODE_UINT;
  421. mp_obj_cell_set(fastn[-unum], POP());
  422. DISPATCH();
  423. }
  424. ENTRY(MP_BC_STORE_NAME): {
  425. MARK_EXC_IP_SELECTIVE();
  426. DECODE_QSTR;
  427. mp_store_name(qst, POP());
  428. DISPATCH();
  429. }
  430. ENTRY(MP_BC_STORE_GLOBAL): {
  431. MARK_EXC_IP_SELECTIVE();
  432. DECODE_QSTR;
  433. mp_store_global(qst, POP());
  434. DISPATCH();
  435. }
  436. #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
  437. ENTRY(MP_BC_STORE_ATTR): {
  438. FRAME_UPDATE();
  439. MARK_EXC_IP_SELECTIVE();
  440. DECODE_QSTR;
  441. mp_store_attr(sp[0], qst, sp[-1]);
  442. sp -= 2;
  443. DISPATCH();
  444. }
  445. #else
  446. // This caching code works with MICROPY_PY_BUILTINS_PROPERTY and/or
  447. // MICROPY_PY_DESCRIPTORS enabled because if the attr exists in
  448. // self->members then it can't be a property or have descriptors. A
  449. // consequence of this is that we can't use MP_MAP_LOOKUP_ADD_IF_NOT_FOUND
  450. // in the fast-path below, because that store could override a property.
  451. ENTRY(MP_BC_STORE_ATTR): {
  452. FRAME_UPDATE();
  453. MARK_EXC_IP_SELECTIVE();
  454. DECODE_QSTR;
  455. mp_map_elem_t *elem = NULL;
  456. mp_obj_t top = TOP();
  457. if (mp_obj_is_instance_type(mp_obj_get_type(top)) && sp[-1] != MP_OBJ_NULL) {
  458. mp_obj_instance_t *self = MP_OBJ_TO_PTR(top);
  459. elem = mp_map_cached_lookup(&self->members, qst, (uint8_t*)ip);
  460. }
  461. if (elem != NULL) {
  462. elem->value = sp[-1];
  463. } else {
  464. mp_store_attr(sp[0], qst, sp[-1]);
  465. }
  466. sp -= 2;
  467. ip++;
  468. DISPATCH();
  469. }
  470. #endif
  471. ENTRY(MP_BC_STORE_SUBSCR):
  472. MARK_EXC_IP_SELECTIVE();
  473. mp_obj_subscr(sp[-1], sp[0], sp[-2]);
  474. sp -= 3;
  475. DISPATCH();
  476. ENTRY(MP_BC_DELETE_FAST): {
  477. MARK_EXC_IP_SELECTIVE();
  478. DECODE_UINT;
  479. if (fastn[-unum] == MP_OBJ_NULL) {
  480. goto local_name_error;
  481. }
  482. fastn[-unum] = MP_OBJ_NULL;
  483. DISPATCH();
  484. }
  485. ENTRY(MP_BC_DELETE_DEREF): {
  486. MARK_EXC_IP_SELECTIVE();
  487. DECODE_UINT;
  488. if (mp_obj_cell_get(fastn[-unum]) == MP_OBJ_NULL) {
  489. goto local_name_error;
  490. }
  491. mp_obj_cell_set(fastn[-unum], MP_OBJ_NULL);
  492. DISPATCH();
  493. }
  494. ENTRY(MP_BC_DELETE_NAME): {
  495. MARK_EXC_IP_SELECTIVE();
  496. DECODE_QSTR;
  497. mp_delete_name(qst);
  498. DISPATCH();
  499. }
  500. ENTRY(MP_BC_DELETE_GLOBAL): {
  501. MARK_EXC_IP_SELECTIVE();
  502. DECODE_QSTR;
  503. mp_delete_global(qst);
  504. DISPATCH();
  505. }
  506. ENTRY(MP_BC_DUP_TOP): {
  507. mp_obj_t top = TOP();
  508. PUSH(top);
  509. DISPATCH();
  510. }
  511. ENTRY(MP_BC_DUP_TOP_TWO):
  512. sp += 2;
  513. sp[0] = sp[-2];
  514. sp[-1] = sp[-3];
  515. DISPATCH();
  516. ENTRY(MP_BC_POP_TOP):
  517. sp -= 1;
  518. DISPATCH();
  519. ENTRY(MP_BC_ROT_TWO): {
  520. mp_obj_t top = sp[0];
  521. sp[0] = sp[-1];
  522. sp[-1] = top;
  523. DISPATCH();
  524. }
  525. ENTRY(MP_BC_ROT_THREE): {
  526. mp_obj_t top = sp[0];
  527. sp[0] = sp[-1];
  528. sp[-1] = sp[-2];
  529. sp[-2] = top;
  530. DISPATCH();
  531. }
  532. ENTRY(MP_BC_JUMP): {
  533. DECODE_SLABEL;
  534. ip += slab;
  535. DISPATCH_WITH_PEND_EXC_CHECK();
  536. }
  537. ENTRY(MP_BC_POP_JUMP_IF_TRUE): {
  538. DECODE_SLABEL;
  539. if (mp_obj_is_true(POP())) {
  540. ip += slab;
  541. }
  542. DISPATCH_WITH_PEND_EXC_CHECK();
  543. }
  544. ENTRY(MP_BC_POP_JUMP_IF_FALSE): {
  545. DECODE_SLABEL;
  546. if (!mp_obj_is_true(POP())) {
  547. ip += slab;
  548. }
  549. DISPATCH_WITH_PEND_EXC_CHECK();
  550. }
  551. ENTRY(MP_BC_JUMP_IF_TRUE_OR_POP): {
  552. DECODE_SLABEL;
  553. if (mp_obj_is_true(TOP())) {
  554. ip += slab;
  555. } else {
  556. sp--;
  557. }
  558. DISPATCH_WITH_PEND_EXC_CHECK();
  559. }
  560. ENTRY(MP_BC_JUMP_IF_FALSE_OR_POP): {
  561. DECODE_SLABEL;
  562. if (mp_obj_is_true(TOP())) {
  563. sp--;
  564. } else {
  565. ip += slab;
  566. }
  567. DISPATCH_WITH_PEND_EXC_CHECK();
  568. }
  569. ENTRY(MP_BC_SETUP_WITH): {
  570. MARK_EXC_IP_SELECTIVE();
  571. // stack: (..., ctx_mgr)
  572. mp_obj_t obj = TOP();
  573. mp_load_method(obj, MP_QSTR___exit__, sp);
  574. mp_load_method(obj, MP_QSTR___enter__, sp + 2);
  575. mp_obj_t ret = mp_call_method_n_kw(0, 0, sp + 2);
  576. sp += 1;
  577. PUSH_EXC_BLOCK(1);
  578. PUSH(ret);
  579. // stack: (..., __exit__, ctx_mgr, as_value)
  580. DISPATCH();
  581. }
  582. ENTRY(MP_BC_WITH_CLEANUP): {
  583. MARK_EXC_IP_SELECTIVE();
  584. // Arriving here, there's "exception control block" on top of stack,
  585. // and __exit__ method (with self) underneath it. Bytecode calls __exit__,
  586. // and "deletes" it off stack, shifting "exception control block"
  587. // to its place.
  588. // The bytecode emitter ensures that there is enough space on the Python
  589. // value stack to hold the __exit__ method plus an additional 4 entries.
  590. if (TOP() == mp_const_none) {
  591. // stack: (..., __exit__, ctx_mgr, None)
  592. sp[1] = mp_const_none;
  593. sp[2] = mp_const_none;
  594. sp -= 2;
  595. mp_call_method_n_kw(3, 0, sp);
  596. SET_TOP(mp_const_none);
  597. } else if (mp_obj_is_small_int(TOP())) {
  598. // Getting here there are two distinct cases:
  599. // - unwind return, stack: (..., __exit__, ctx_mgr, ret_val, SMALL_INT(-1))
  600. // - unwind jump, stack: (..., __exit__, ctx_mgr, dest_ip, SMALL_INT(num_exc))
  601. // For both cases we do exactly the same thing.
  602. mp_obj_t data = sp[-1];
  603. mp_obj_t cause = sp[0];
  604. sp[-1] = mp_const_none;
  605. sp[0] = mp_const_none;
  606. sp[1] = mp_const_none;
  607. mp_call_method_n_kw(3, 0, sp - 3);
  608. sp[-3] = data;
  609. sp[-2] = cause;
  610. sp -= 2; // we removed (__exit__, ctx_mgr)
  611. } else {
  612. assert(mp_obj_is_exception_instance(TOP()));
  613. // stack: (..., __exit__, ctx_mgr, exc_instance)
  614. // Need to pass (exc_type, exc_instance, None) as arguments to __exit__.
  615. sp[1] = sp[0];
  616. sp[0] = MP_OBJ_FROM_PTR(mp_obj_get_type(sp[0]));
  617. sp[2] = mp_const_none;
  618. sp -= 2;
  619. mp_obj_t ret_value = mp_call_method_n_kw(3, 0, sp);
  620. if (mp_obj_is_true(ret_value)) {
  621. // We need to silence/swallow the exception. This is done
  622. // by popping the exception and the __exit__ handler and
  623. // replacing it with None, which signals END_FINALLY to just
  624. // execute the finally handler normally.
  625. SET_TOP(mp_const_none);
  626. } else {
  627. // We need to re-raise the exception. We pop __exit__ handler
  628. // by copying the exception instance down to the new top-of-stack.
  629. sp[0] = sp[3];
  630. }
  631. }
  632. DISPATCH();
  633. }
  634. ENTRY(MP_BC_UNWIND_JUMP): {
  635. MARK_EXC_IP_SELECTIVE();
  636. DECODE_SLABEL;
  637. PUSH((mp_obj_t)(mp_uint_t)(uintptr_t)(ip + slab)); // push destination ip for jump
  638. PUSH((mp_obj_t)(mp_uint_t)(*ip)); // push number of exception handlers to unwind (0x80 bit set if we also need to pop stack)
  639. unwind_jump:;
  640. mp_uint_t unum = (mp_uint_t)POP(); // get number of exception handlers to unwind
  641. while ((unum & 0x7f) > 0) {
  642. unum -= 1;
  643. assert(exc_sp >= exc_stack);
  644. if (MP_TAGPTR_TAG1(exc_sp->val_sp)) {
  645. if (exc_sp->handler > ip) {
  646. // Found a finally handler that isn't active; run it.
  647. // Getting here the stack looks like:
  648. // (..., X, dest_ip)
  649. // where X is pointed to by exc_sp->val_sp and in the case
  650. // of a "with" block contains the context manager info.
  651. assert(&sp[-1] == MP_TAGPTR_PTR(exc_sp->val_sp));
  652. // We're going to run "finally" code as a coroutine
  653. // (not calling it recursively). Set up a sentinel
  654. // on the stack so it can return back to us when it is
  655. // done (when WITH_CLEANUP or END_FINALLY reached).
  656. // The sentinel is the number of exception handlers left to
  657. // unwind, which is a non-negative integer.
  658. PUSH(MP_OBJ_NEW_SMALL_INT(unum));
  659. ip = exc_sp->handler;
  660. goto dispatch_loop;
  661. } else {
  662. // Found a finally handler that is already active; cancel it.
  663. CANCEL_ACTIVE_FINALLY(sp);
  664. }
  665. }
  666. POP_EXC_BLOCK();
  667. }
  668. ip = (const byte*)MP_OBJ_TO_PTR(POP()); // pop destination ip for jump
  669. if (unum != 0) {
  670. // pop the exhausted iterator
  671. sp -= MP_OBJ_ITER_BUF_NSLOTS;
  672. }
  673. DISPATCH_WITH_PEND_EXC_CHECK();
  674. }
  675. ENTRY(MP_BC_SETUP_EXCEPT):
  676. ENTRY(MP_BC_SETUP_FINALLY): {
  677. MARK_EXC_IP_SELECTIVE();
  678. #if SELECTIVE_EXC_IP
  679. PUSH_EXC_BLOCK((code_state->ip[-1] == MP_BC_SETUP_FINALLY) ? 1 : 0);
  680. #else
  681. PUSH_EXC_BLOCK((code_state->ip[0] == MP_BC_SETUP_FINALLY) ? 1 : 0);
  682. #endif
  683. DISPATCH();
  684. }
  685. ENTRY(MP_BC_END_FINALLY):
  686. MARK_EXC_IP_SELECTIVE();
  687. // if TOS is None, just pops it and continues
  688. // if TOS is an integer, finishes coroutine and returns control to caller
  689. // if TOS is an exception, reraises the exception
  690. assert(exc_sp >= exc_stack);
  691. POP_EXC_BLOCK();
  692. if (TOP() == mp_const_none) {
  693. sp--;
  694. } else if (mp_obj_is_small_int(TOP())) {
  695. // We finished "finally" coroutine and now dispatch back
  696. // to our caller, based on TOS value
  697. mp_int_t cause = MP_OBJ_SMALL_INT_VALUE(POP());
  698. if (cause < 0) {
  699. // A negative cause indicates unwind return
  700. goto unwind_return;
  701. } else {
  702. // Otherwise it's an unwind jump and we must push as a raw
  703. // number the number of exception handlers to unwind
  704. PUSH((mp_obj_t)cause);
  705. goto unwind_jump;
  706. }
  707. } else {
  708. assert(mp_obj_is_exception_instance(TOP()));
  709. RAISE(TOP());
  710. }
  711. DISPATCH();
  712. ENTRY(MP_BC_GET_ITER):
  713. MARK_EXC_IP_SELECTIVE();
  714. SET_TOP(mp_getiter(TOP(), NULL));
  715. DISPATCH();
  716. // An iterator for a for-loop takes MP_OBJ_ITER_BUF_NSLOTS slots on
  717. // the Python value stack. These slots are either used to store the
  718. // iterator object itself, or the first slot is MP_OBJ_NULL and
  719. // the second slot holds a reference to the iterator object.
  720. ENTRY(MP_BC_GET_ITER_STACK): {
  721. MARK_EXC_IP_SELECTIVE();
  722. mp_obj_t obj = TOP();
  723. mp_obj_iter_buf_t *iter_buf = (mp_obj_iter_buf_t*)sp;
  724. sp += MP_OBJ_ITER_BUF_NSLOTS - 1;
  725. obj = mp_getiter(obj, iter_buf);
  726. if (obj != MP_OBJ_FROM_PTR(iter_buf)) {
  727. // Iterator didn't use the stack so indicate that with MP_OBJ_NULL.
  728. sp[-MP_OBJ_ITER_BUF_NSLOTS + 1] = MP_OBJ_NULL;
  729. sp[-MP_OBJ_ITER_BUF_NSLOTS + 2] = obj;
  730. }
  731. DISPATCH();
  732. }
                ENTRY(MP_BC_FOR_ITER): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward
                    code_state->sp = sp;
                    // Recover the iterator left by GET_ITER_STACK: either a separate
                    // object stored in the second reserved slot (first slot is
                    // MP_OBJ_NULL), or the in-stack iterator buffer itself.
                    mp_obj_t obj;
                    if (sp[-MP_OBJ_ITER_BUF_NSLOTS + 1] == MP_OBJ_NULL) {
                        obj = sp[-MP_OBJ_ITER_BUF_NSLOTS + 2];
                    } else {
                        obj = MP_OBJ_FROM_PTR(&sp[-MP_OBJ_ITER_BUF_NSLOTS + 1]);
                    }
                    mp_obj_t value = mp_iternext_allow_raise(obj);
                    if (value == MP_OBJ_STOP_ITERATION) {
                        sp -= MP_OBJ_ITER_BUF_NSLOTS; // pop the exhausted iterator
                        ip += ulab; // jump to after for-block
                    } else {
                        PUSH(value); // push the next iteration value
                        #if MICROPY_PY_SYS_SETTRACE
                        // LINE event should trigger for every iteration so invalidate last trigger
                        if (code_state->frame) {
                            code_state->frame->lineno = 0;
                        }
                        #endif
                    }
                    DISPATCH();
                }

                // Pop the top exception-handler block and take an unconditional
                // forward jump (used to leave a finished try/except region).
                ENTRY(MP_BC_POP_EXCEPT_JUMP): {
                    assert(exc_sp >= exc_stack);
                    POP_EXC_BLOCK();
                    DECODE_ULABEL;
                    ip += ulab;
                    DISPATCH_WITH_PEND_EXC_CHECK();
                }
                // Collapse the top unum stack values into a new tuple.
                ENTRY(MP_BC_BUILD_TUPLE): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    sp -= unum - 1;
                    SET_TOP(mp_obj_new_tuple(unum, sp));
                    DISPATCH();
                }

                // Collapse the top unum stack values into a new list.
                ENTRY(MP_BC_BUILD_LIST): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    sp -= unum - 1;
                    SET_TOP(mp_obj_new_list(unum, sp));
                    DISPATCH();
                }

                // Push a new empty dict, pre-sized for unum entries.
                ENTRY(MP_BC_BUILD_MAP): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    PUSH(mp_obj_new_dict(unum));
                    DISPATCH();
                }

                // Stack: map value key <- TOS.  Store map[key] = value and pop the
                // key/value pair, leaving the map on top.
                ENTRY(MP_BC_STORE_MAP):
                    MARK_EXC_IP_SELECTIVE();
                    sp -= 2;
                    mp_obj_dict_store(sp[0], sp[2], sp[1]);
                    DISPATCH();

                #if MICROPY_PY_BUILTINS_SET
                // Collapse the top unum stack values into a new set.
                ENTRY(MP_BC_BUILD_SET): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    sp -= unum - 1;
                    SET_TOP(mp_obj_new_set(unum, sp));
                    DISPATCH();
                }
                #endif
                #if MICROPY_PY_BUILTINS_SLICE
                // Build a slice object from 2 or 3 stack values; the one-byte
                // operand gives the argument count.
                ENTRY(MP_BC_BUILD_SLICE): {
                    MARK_EXC_IP_SELECTIVE();
                    mp_obj_t step = mp_const_none;
                    if (*ip++ == 3) {
                        // 3-argument slice includes step
                        step = POP();
                    }
                    mp_obj_t stop = POP();
                    mp_obj_t start = TOP();
                    SET_TOP(mp_obj_new_slice(start, stop, step));
                    DISPATCH();
                }
                #endif

                // Append/store an element into a comprehension's collection.
                // unum >> 2 is the depth of the collection on the stack;
                // unum & 3 selects the kind: 0 = list, 1 = dict, else set.
                ENTRY(MP_BC_STORE_COMP): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    mp_obj_t obj = sp[-(unum >> 2)];
                    if ((unum & 3) == 0) {
                        mp_obj_list_append(obj, sp[0]);
                        sp--;
                    } else if (!MICROPY_PY_BUILTINS_SET || (unum & 3) == 1) {
                        // dict comprehension: key on top, value below it
                        mp_obj_dict_store(obj, sp[0], sp[-1]);
                        sp -= 2;
                    #if MICROPY_PY_BUILTINS_SET
                    } else {
                        mp_obj_set_store(obj, sp[0]);
                        sp--;
                    #endif
                    }
                    DISPATCH();
                }

                // Unpack the sequence at TOS into exactly unum values on the stack.
                ENTRY(MP_BC_UNPACK_SEQUENCE): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    mp_unpack_sequence(sp[0], unum, sp);
                    sp += unum - 1;
                    DISPATCH();
                }

                // Star-unpack (a, *b, c = ...) the sequence at TOS; the low and
                // next byte of unum encode the counts around the starred target
                // (see mp_unpack_ex for the exact encoding).
                ENTRY(MP_BC_UNPACK_EX): {
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    mp_unpack_ex(sp[0], unum, sp);
                    sp += (unum & 0xff) + ((unum >> 8) & 0xff);
                    DISPATCH();
                }
                // Create a function from the raw code at ptr, with no default
                // positional or keyword arguments.
                ENTRY(MP_BC_MAKE_FUNCTION): {
                    DECODE_PTR;
                    PUSH(mp_make_function_from_raw_code(ptr, MP_OBJ_NULL, MP_OBJ_NULL));
                    DISPATCH();
                }

                // Create a function with default arguments taken from the stack.
                ENTRY(MP_BC_MAKE_FUNCTION_DEFARGS): {
                    DECODE_PTR;
                    // Stack layout: def_tuple def_dict <- TOS
                    mp_obj_t def_dict = POP();
                    SET_TOP(mp_make_function_from_raw_code(ptr, TOP(), def_dict));
                    DISPATCH();
                }

                // Create a closure; the byte following the pointer operand gives
                // the number of closed-over variables on the stack.
                ENTRY(MP_BC_MAKE_CLOSURE): {
                    DECODE_PTR;
                    size_t n_closed_over = *ip++;
                    // Stack layout: closed_overs <- TOS
                    sp -= n_closed_over - 1;
                    SET_TOP(mp_make_closure_from_raw_code(ptr, n_closed_over, sp));
                    DISPATCH();
                }

                // As MAKE_CLOSURE but default arguments precede the closed-overs.
                ENTRY(MP_BC_MAKE_CLOSURE_DEFARGS): {
                    DECODE_PTR;
                    size_t n_closed_over = *ip++;
                    // Stack layout: def_tuple def_dict closed_overs <- TOS
                    sp -= 2 + n_closed_over - 1;
                    // The 0x100 bit signals that default args are present
                    // (see mp_make_closure_from_raw_code).
                    SET_TOP(mp_make_closure_from_raw_code(ptr, 0x100 | n_closed_over, sp));
                    DISPATCH();
                }
                ENTRY(MP_BC_CALL_FUNCTION): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    // unum & 0xff == n_positional
                    // (unum >> 8) & 0xff == n_keyword
                    // Each keyword arg occupies two slots, so the second term below
                    // is 2 * n_keyword; *sp ends up pointing at the callable.
                    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe);
                    #if MICROPY_STACKLESS
                    // For a bytecode function, avoid C recursion: build a new code
                    // state and continue the dispatch loop inside it.
                    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
                        code_state->ip = ip;
                        code_state->sp = sp;
                        code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp);
                        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(*sp, unum & 0xff, (unum >> 8) & 0xff, sp + 1);
                        #if !MICROPY_ENABLE_PYSTACK
                        if (new_state == NULL) {
                            // Couldn't allocate codestate on heap: in the strict case raise
                            // an exception, otherwise just fall through to stack allocation.
                            #if MICROPY_STACKLESS_STRICT
                        deep_recursion_error:
                            mp_raise_recursion_depth();
                            #endif
                        } else
                        #endif
                        {
                            new_state->prev = code_state;
                            code_state = new_state;
                            nlr_pop();
                            goto run_code_state;
                        }
                    }
                    #endif
                    // Fallback (or non-stackless) path: call through the C stack.
                    SET_TOP(mp_call_function_n_kw(*sp, unum & 0xff, (unum >> 8) & 0xff, sp + 1));
                    DISPATCH();
                }

                ENTRY(MP_BC_CALL_FUNCTION_VAR_KW): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    // unum & 0xff == n_positional
                    // (unum >> 8) & 0xff == n_keyword
                    // We have following stack layout here:
                    // fun arg0 arg1 ... kw0 val0 kw1 val1 ... seq dict <- TOS
                    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 2;
                    #if MICROPY_STACKLESS
                    // Stackless path: flatten the */** args first, then build a new
                    // code state for a bytecode function.
                    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
                        code_state->ip = ip;
                        code_state->sp = sp;
                        code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp);
                        mp_call_args_t out_args;
                        mp_call_prepare_args_n_kw_var(false, unum, sp, &out_args);
                        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(out_args.fun,
                            out_args.n_args, out_args.n_kw, out_args.args);
                        #if !MICROPY_ENABLE_PYSTACK
                        // Freeing args at this point does not follow a LIFO order so only do it if
                        // pystack is not enabled.  For pystack, they are freed when code_state is.
                        mp_nonlocal_free(out_args.args, out_args.n_alloc * sizeof(mp_obj_t));
                        #endif
                        #if !MICROPY_ENABLE_PYSTACK
                        if (new_state == NULL) {
                            // Couldn't allocate codestate on heap: in the strict case raise
                            // an exception, otherwise just fall through to stack allocation.
                            #if MICROPY_STACKLESS_STRICT
                            goto deep_recursion_error;
                            #endif
                        } else
                        #endif
                        {
                            new_state->prev = code_state;
                            code_state = new_state;
                            nlr_pop();
                            goto run_code_state;
                        }
                    }
                    #endif
                    SET_TOP(mp_call_method_n_kw_var(false, unum, sp));
                    DISPATCH();
                }
                ENTRY(MP_BC_CALL_METHOD): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    // unum & 0xff == n_positional
                    // (unum >> 8) & 0xff == n_keyword
                    // Extra slot (+1) relative to CALL_FUNCTION holds self (or
                    // MP_OBJ_NULL when there is no self to pass).
                    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 1;
                    #if MICROPY_STACKLESS
                    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
                        code_state->ip = ip;
                        code_state->sp = sp;
                        code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp);
                        size_t n_args = unum & 0xff;
                        size_t n_kw = (unum >> 8) & 0xff;
                        // sp[1] is self or MP_OBJ_NULL; when present it becomes the
                        // first positional argument.
                        int adjust = (sp[1] == MP_OBJ_NULL) ? 0 : 1;
                        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(*sp, n_args + adjust, n_kw, sp + 2 - adjust);
                        #if !MICROPY_ENABLE_PYSTACK
                        if (new_state == NULL) {
                            // Couldn't allocate codestate on heap: in the strict case raise
                            // an exception, otherwise just fall through to stack allocation.
                            #if MICROPY_STACKLESS_STRICT
                            goto deep_recursion_error;
                            #endif
                        } else
                        #endif
                        {
                            new_state->prev = code_state;
                            code_state = new_state;
                            nlr_pop();
                            goto run_code_state;
                        }
                    }
                    #endif
                    SET_TOP(mp_call_method_n_kw(unum & 0xff, (unum >> 8) & 0xff, sp));
                    DISPATCH();
                }

                ENTRY(MP_BC_CALL_METHOD_VAR_KW): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_UINT;
                    // unum & 0xff == n_positional
                    // (unum >> 8) & 0xff == n_keyword
                    // We have following stack layout here:
                    // fun self arg0 arg1 ... kw0 val0 kw1 val1 ... seq dict <- TOS
                    sp -= (unum & 0xff) + ((unum >> 7) & 0x1fe) + 3;
                    #if MICROPY_STACKLESS
                    if (mp_obj_get_type(*sp) == &mp_type_fun_bc) {
                        code_state->ip = ip;
                        code_state->sp = sp;
                        code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp);
                        mp_call_args_t out_args;
                        mp_call_prepare_args_n_kw_var(true, unum, sp, &out_args);
                        mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(out_args.fun,
                            out_args.n_args, out_args.n_kw, out_args.args);
                        #if !MICROPY_ENABLE_PYSTACK
                        // Freeing args at this point does not follow a LIFO order so only do it if
                        // pystack is not enabled.  For pystack, they are freed when code_state is.
                        mp_nonlocal_free(out_args.args, out_args.n_alloc * sizeof(mp_obj_t));
                        #endif
                        #if !MICROPY_ENABLE_PYSTACK
                        if (new_state == NULL) {
                            // Couldn't allocate codestate on heap: in the strict case raise
                            // an exception, otherwise just fall through to stack allocation.
                            #if MICROPY_STACKLESS_STRICT
                            goto deep_recursion_error;
                            #endif
                        } else
                        #endif
                        {
                            new_state->prev = code_state;
                            code_state = new_state;
                            nlr_pop();
                            goto run_code_state;
                        }
                    }
                    #endif
                    SET_TOP(mp_call_method_n_kw_var(true, unum, sp));
                    DISPATCH();
                }
                ENTRY(MP_BC_RETURN_VALUE):
                    MARK_EXC_IP_SELECTIVE();
                unwind_return:
                    // Search for and execute finally handlers that aren't already active
                    while (exc_sp >= exc_stack) {
                        if (MP_TAGPTR_TAG1(exc_sp->val_sp)) {
                            if (exc_sp->handler > ip) {
                                // Found a finally handler that isn't active; run it.
                                // Getting here the stack looks like:
                                // (..., X, [iter0, iter1, ...,] ret_val)
                                // where X is pointed to by exc_sp->val_sp and in the case
                                // of a "with" block contains the context manager info.
                                // There may be 0 or more for-iterators between X and the
                                // return value, and these must be removed before control can
                                // pass to the finally code.  We simply copy the ret_value down
                                // over these iterators, if they exist.  If they don't then the
                                // following is a null operation.
                                mp_obj_t *finally_sp = MP_TAGPTR_PTR(exc_sp->val_sp);
                                finally_sp[1] = sp[0];
                                sp = &finally_sp[1];
                                // We're going to run "finally" code as a coroutine
                                // (not calling it recursively).  Set up a sentinel
                                // on a stack so it can return back to us when it is
                                // done (when WITH_CLEANUP or END_FINALLY reached).
                                PUSH(MP_OBJ_NEW_SMALL_INT(-1));
                                ip = exc_sp->handler;
                                goto dispatch_loop;
                            } else {
                                // Found a finally handler that is already active; cancel it.
                                CANCEL_ACTIVE_FINALLY(sp);
                            }
                        }
                        POP_EXC_BLOCK();
                    }
                    // All finallys are done: this frame is returning for real.
                    nlr_pop();
                    code_state->sp = sp;
                    assert(exc_sp == exc_stack - 1);
                    MICROPY_VM_HOOK_RETURN
                    #if MICROPY_STACKLESS
                    // If this frame was entered via a stackless call, resume the
                    // caller's code state instead of returning through C.
                    if (code_state->prev != NULL) {
                        mp_obj_t res = *sp;
                        mp_globals_set(code_state->old_globals);
                        mp_code_state_t *new_code_state = code_state->prev;
                        #if MICROPY_ENABLE_PYSTACK
                        // Free code_state, and args allocated by mp_call_prepare_args_n_kw_var
                        // (The latter is implicitly freed when using pystack due to its LIFO nature.)
                        // The sizeof in the following statement does not include the size of the variable
                        // part of the struct.  This arg is anyway not used if pystack is enabled.
                        mp_nonlocal_free(code_state, sizeof(mp_code_state_t));
                        #endif
                        code_state = new_code_state;
                        // Deliver the return value to the caller's stack.
                        *code_state->sp = res;
                        goto run_code_state_from_return;
                    }
                    #endif
                    FRAME_LEAVE();
                    return MP_VM_RETURN_NORMAL;
                // Bare "raise": re-raise the inner-most previously-caught exception.
                ENTRY(MP_BC_RAISE_LAST): {
                    MARK_EXC_IP_SELECTIVE();
                    // search for the inner-most previous exception, to reraise it
                    mp_obj_t obj = MP_OBJ_NULL;
                    for (mp_exc_stack_t *e = exc_sp; e >= exc_stack; --e) {
                        if (e->prev_exc != NULL) {
                            obj = MP_OBJ_FROM_PTR(e->prev_exc);
                            break;
                        }
                    }
                    if (obj == MP_OBJ_NULL) {
                        // bare raise with nothing active is a RuntimeError
                        obj = mp_obj_new_exception_msg(&mp_type_RuntimeError, "no active exception to reraise");
                    }
                    RAISE(obj);
                }

                // "raise X": raise the object on top of the stack.
                ENTRY(MP_BC_RAISE_OBJ): {
                    MARK_EXC_IP_SELECTIVE();
                    mp_obj_t obj = mp_make_raise_obj(TOP());
                    RAISE(obj);
                }

                // "raise X from Y": exception chaining isn't supported, so the
                // cause Y is popped and discarded with a warning.
                ENTRY(MP_BC_RAISE_FROM): {
                    MARK_EXC_IP_SELECTIVE();
                    mp_warning(NULL, "exception chaining not supported");
                    sp--; // ignore (pop) "from" argument
                    mp_obj_t obj = mp_make_raise_obj(TOP());
                    RAISE(obj);
                }

                ENTRY(MP_BC_YIELD_VALUE):
                yield:
                    // Suspend this frame: save the VM registers back into the code
                    // state and return to the caller with a yield.
                    nlr_pop();
                    code_state->ip = ip;
                    code_state->sp = sp;
                    code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp);
                    FRAME_LEAVE();
                    return MP_VM_RETURN_YIELD;
                ENTRY(MP_BC_YIELD_FROM): {
                    MARK_EXC_IP_SELECTIVE();
                    //#define EXC_MATCH(exc, type) mp_obj_is_type(exc, type)
                    #define EXC_MATCH(exc, type) mp_obj_exception_match(exc, type)
                    #define GENERATOR_EXIT_IF_NEEDED(t) if (t != MP_OBJ_NULL && EXC_MATCH(t, MP_OBJ_FROM_PTR(&mp_type_GeneratorExit))) { mp_obj_t raise_t = mp_make_raise_obj(t); RAISE(raise_t); }
                    mp_vm_return_kind_t ret_kind;
                    mp_obj_t send_value = POP();
                    mp_obj_t t_exc = MP_OBJ_NULL;
                    mp_obj_t ret_value;
                    code_state->sp = sp; // Save sp because it's needed if mp_resume raises StopIteration
                    // Resume the delegate (TOS): either throw a pending injected
                    // exception into it, or send it the value.
                    if (inject_exc != MP_OBJ_NULL) {
                        t_exc = inject_exc;
                        inject_exc = MP_OBJ_NULL;
                        ret_kind = mp_resume(TOP(), MP_OBJ_NULL, t_exc, &ret_value);
                    } else {
                        ret_kind = mp_resume(TOP(), send_value, MP_OBJ_NULL, &ret_value);
                    }
                    if (ret_kind == MP_VM_RETURN_YIELD) {
                        // Delegate yielded: back up ip so this YIELD_FROM re-executes
                        // on the next resume, and yield the value outward.
                        ip--;
                        PUSH(ret_value);
                        goto yield;
                    } else if (ret_kind == MP_VM_RETURN_NORMAL) {
                        // Pop exhausted gen
                        sp--;
                        if (ret_value == MP_OBJ_STOP_ITERATION) {
                            // Optimize StopIteration
                            // TODO: get StopIteration's value
                            PUSH(mp_const_none);
                        } else {
                            PUSH(ret_value);
                        }
                        // If we injected GeneratorExit downstream, then even
                        // if it was swallowed, we re-raise GeneratorExit
                        GENERATOR_EXIT_IF_NEEDED(t_exc);
                        DISPATCH();
                    } else {
                        assert(ret_kind == MP_VM_RETURN_EXCEPTION);
                        assert(!EXC_MATCH(ret_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration)));
                        // Pop exhausted gen
                        sp--;
                        RAISE(ret_value);
                    }
                }
                // Import module qst: consumes two import arguments from the stack
                // and replaces them with the imported module object.
                ENTRY(MP_BC_IMPORT_NAME): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_QSTR;
                    mp_obj_t obj = POP();
                    SET_TOP(mp_import_name(qst, obj, TOP()));
                    DISPATCH();
                }

                // Push attribute qst of the module at TOS (the module stays on
                // the stack for subsequent IMPORT_FROMs).
                ENTRY(MP_BC_IMPORT_FROM): {
                    FRAME_UPDATE();
                    MARK_EXC_IP_SELECTIVE();
                    DECODE_QSTR;
                    mp_obj_t obj = mp_import_from(TOP(), qst);
                    PUSH(obj);
                    DISPATCH();
                }

                // "from module import *": pop the module and import its names.
                ENTRY(MP_BC_IMPORT_STAR):
                    MARK_EXC_IP_SELECTIVE();
                    mp_import_all(POP());
                    DISPATCH();
                #if MICROPY_OPT_COMPUTED_GOTO
                // With computed goto, each "multi" opcode (argument encoded in the
                // opcode byte itself) gets its own dispatch entry; ip[-1] is the
                // opcode that was just fetched.
                ENTRY(MP_BC_LOAD_CONST_SMALL_INT_MULTI):
                    PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS));
                    DISPATCH();

                ENTRY(MP_BC_LOAD_FAST_MULTI):
                    // fastn is indexed with non-positive offsets: it points at
                    // state[n_state - 1] and locals sit below it.
                    obj_shared = fastn[MP_BC_LOAD_FAST_MULTI - (mp_int_t)ip[-1]];
                    goto load_check;

                ENTRY(MP_BC_STORE_FAST_MULTI):
                    fastn[MP_BC_STORE_FAST_MULTI - (mp_int_t)ip[-1]] = POP();
                    DISPATCH();

                ENTRY(MP_BC_UNARY_OP_MULTI):
                    MARK_EXC_IP_SELECTIVE();
                    SET_TOP(mp_unary_op(ip[-1] - MP_BC_UNARY_OP_MULTI, TOP()));
                    DISPATCH();

                ENTRY(MP_BC_BINARY_OP_MULTI): {
                    MARK_EXC_IP_SELECTIVE();
                    mp_obj_t rhs = POP();
                    mp_obj_t lhs = TOP();
                    SET_TOP(mp_binary_op(ip[-1] - MP_BC_BINARY_OP_MULTI, lhs, rhs));
                    DISPATCH();
                }

                ENTRY_DEFAULT:
                    MARK_EXC_IP_SELECTIVE();
                #else
                // Without computed goto the multi opcodes fall into the switch
                // default and are decoded with range checks on the opcode byte.
                ENTRY_DEFAULT:
                    if (ip[-1] < MP_BC_LOAD_CONST_SMALL_INT_MULTI + MP_BC_LOAD_CONST_SMALL_INT_MULTI_NUM) {
                        PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS));
                        DISPATCH();
                    } else if (ip[-1] < MP_BC_LOAD_FAST_MULTI + MP_BC_LOAD_FAST_MULTI_NUM) {
                        obj_shared = fastn[MP_BC_LOAD_FAST_MULTI - (mp_int_t)ip[-1]];
                        goto load_check;
                    } else if (ip[-1] < MP_BC_STORE_FAST_MULTI + MP_BC_STORE_FAST_MULTI_NUM) {
                        fastn[MP_BC_STORE_FAST_MULTI - (mp_int_t)ip[-1]] = POP();
                        DISPATCH();
                    } else if (ip[-1] < MP_BC_UNARY_OP_MULTI + MP_BC_UNARY_OP_MULTI_NUM) {
                        SET_TOP(mp_unary_op(ip[-1] - MP_BC_UNARY_OP_MULTI, TOP()));
                        DISPATCH();
                    } else if (ip[-1] < MP_BC_BINARY_OP_MULTI + MP_BC_BINARY_OP_MULTI_NUM) {
                        mp_obj_t rhs = POP();
                        mp_obj_t lhs = TOP();
                        SET_TOP(mp_binary_op(ip[-1] - MP_BC_BINARY_OP_MULTI, lhs, rhs));
                        DISPATCH();
                    } else
                #endif
                {
                    // Unknown/unimplemented opcode: leave the VM with an exception
                    // placed in state[0] (sp is not usable by the caller here).
                    mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NotImplementedError, "opcode");
                    nlr_pop();
                    code_state->state[0] = obj;
                    FRAME_LEAVE();
                    return MP_VM_RETURN_EXCEPTION;
                }

                #if !MICROPY_OPT_COMPUTED_GOTO
                } // switch
                #endif
pending_exception_check:
                // Periodic check for exceptions/callbacks delivered from outside
                // the VM (e.g. KeyboardInterrupt or scheduled callbacks).
                MICROPY_VM_HOOK_LOOP

                #if MICROPY_ENABLE_SCHEDULER
                // This is an inlined variant of mp_handle_pending
                if (MP_STATE_VM(sched_state) == MP_SCHED_PENDING) {
                    MARK_EXC_IP_SELECTIVE();
                    mp_uint_t atomic_state = MICROPY_BEGIN_ATOMIC_SECTION();
                    mp_obj_t obj = MP_STATE_VM(mp_pending_exception);
                    if (obj != MP_OBJ_NULL) {
                        // Consume the pending exception and raise it here; go idle
                        // only if no scheduled callbacks remain.
                        MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL;
                        if (!mp_sched_num_pending()) {
                            MP_STATE_VM(sched_state) = MP_SCHED_IDLE;
                        }
                        MICROPY_END_ATOMIC_SECTION(atomic_state);
                        RAISE(obj);
                    }
                    // No pending exception: run queued callbacks (this also closes
                    // the atomic section).
                    mp_handle_pending_tail(atomic_state);
                }
                #else
                // This is an inlined variant of mp_handle_pending
                if (MP_STATE_VM(mp_pending_exception) != MP_OBJ_NULL) {
                    MARK_EXC_IP_SELECTIVE();
                    mp_obj_t obj = MP_STATE_VM(mp_pending_exception);
                    MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL;
                    RAISE(obj);
                }
                #endif

                #if MICROPY_PY_THREAD_GIL
                #if MICROPY_PY_THREAD_GIL_VM_DIVISOR
                // Only yield the GIL every MICROPY_PY_THREAD_GIL_VM_DIVISOR
                // passes through this check.
                if (--gil_divisor == 0)
                #endif
                {
                    #if MICROPY_PY_THREAD_GIL_VM_DIVISOR
                    gil_divisor = MICROPY_PY_THREAD_GIL_VM_DIVISOR;
                    #endif
                    #if MICROPY_ENABLE_SCHEDULER
                    // can only switch threads if the scheduler is unlocked
                    if (MP_STATE_VM(sched_state) == MP_SCHED_IDLE)
                    #endif
                    {
                        // Briefly release and re-acquire the GIL so other threads
                        // get a chance to run.
                        MP_THREAD_GIL_EXIT();
                        MP_THREAD_GIL_ENTER();
                    }
                }
                #endif
            } // for loop

        } else {
exception_handler:
            // exception occurred

            #if MICROPY_PY_SYS_EXC_INFO
            MP_STATE_VM(cur_exception) = nlr.ret_val;
            #endif

            #if SELECTIVE_EXC_IP
            // with selective ip, we store the ip 1 byte past the opcode, so move ptr back
            code_state->ip -= 1;
            #endif

            // Fast path: a StopIteration raised while executing FOR_ITER or
            // YIELD_FROM is handled inline rather than via the handler stack.
            if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type), MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
                if (code_state->ip) {
                    // check if it's a StopIteration within a for block
                    if (*code_state->ip == MP_BC_FOR_ITER) {
                        const byte *ip = code_state->ip + 1;
                        DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward
                        code_state->ip = ip + ulab; // jump to after for-block
                        code_state->sp -= MP_OBJ_ITER_BUF_NSLOTS; // pop the exhausted iterator
                        goto outer_dispatch_loop; // continue with dispatch loop
                    } else if (*code_state->ip == MP_BC_YIELD_FROM) {
                        // StopIteration inside yield from call means return a value of
                        // yield from, so inject exception's value as yield from's result
                        // (Instead of stack pop then push we just replace exhausted gen with value)
                        *code_state->sp = mp_obj_exception_get_value(MP_OBJ_FROM_PTR(nlr.ret_val));
                        code_state->ip++; // yield from is over, move to next instruction
                        goto outer_dispatch_loop; // continue with dispatch loop
                    }
                }
            }

            #if MICROPY_PY_SYS_SETTRACE
            // Exceptions are traced here
            if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type), MP_OBJ_FROM_PTR(&mp_type_Exception))) {
                TRACE_TICK(code_state->ip, code_state->sp, true /* yes, it's an exception */);
            }
            #endif

            #if MICROPY_STACKLESS
unwind_loop:
            #endif
            // Set traceback info (file and line number) where the exception occurred, but not for:
            // - constant GeneratorExit object, because it's const
            // - exceptions re-raised by END_FINALLY
            // - exceptions re-raised explicitly by "raise"
            if (nlr.ret_val != &mp_const_GeneratorExit_obj
                && *code_state->ip != MP_BC_END_FINALLY
                && *code_state->ip != MP_BC_RAISE_LAST) {
                // Decode the bytecode prelude to locate the start of the opcodes
                // and the block-name/source-file/line-number info.
                const byte *ip = code_state->fun_bc->bytecode;
                MP_BC_PRELUDE_SIG_DECODE(ip);
                MP_BC_PRELUDE_SIZE_DECODE(ip);
                const byte *bytecode_start = ip + n_info + n_cell;
                #if !MICROPY_PERSISTENT_CODE
                // so bytecode is aligned
                bytecode_start = MP_ALIGN(bytecode_start, sizeof(mp_uint_t));
                #endif
                size_t bc = code_state->ip - bytecode_start;
                #if MICROPY_PERSISTENT_CODE
                qstr block_name = ip[0] | (ip[1] << 8);
                qstr source_file = ip[2] | (ip[3] << 8);
                ip += 4;
                #else
                qstr block_name = mp_decode_uint_value(ip);
                ip = mp_decode_uint_skip(ip);
                qstr source_file = mp_decode_uint_value(ip);
                ip = mp_decode_uint_skip(ip);
                #endif
                size_t source_line = mp_bytecode_get_source_line(ip, bc);
                mp_obj_exception_add_traceback(MP_OBJ_FROM_PTR(nlr.ret_val), source_file, source_line, block_name);
            }

            // Discard handlers whose code we have already passed.
            while (exc_sp >= exc_stack && exc_sp->handler <= code_state->ip) {
                // nested exception
                assert(exc_sp >= exc_stack);
                // TODO make a proper message for nested exception
                // at the moment we are just raising the very last exception (the one that caused the nested exception)
                // move up to previous exception handler
                POP_EXC_BLOCK();
            }

            if (exc_sp >= exc_stack) {
                // catch exception and pass to byte code
                code_state->ip = exc_sp->handler;
                mp_obj_t *sp = MP_TAGPTR_PTR(exc_sp->val_sp);
                // save this exception in the stack so it can be used in a reraise, if needed
                exc_sp->prev_exc = nlr.ret_val;
                // push exception object so it can be handled by bytecode
                PUSH(MP_OBJ_FROM_PTR(nlr.ret_val));
                code_state->sp = sp;

            #if MICROPY_STACKLESS
            } else if (code_state->prev != NULL) {
                // No handler in this frame: pop back to the caller's code state
                // (stackless call chain) and continue unwinding there.
                mp_globals_set(code_state->old_globals);
                mp_code_state_t *new_code_state = code_state->prev;
                #if MICROPY_ENABLE_PYSTACK
                // Free code_state, and args allocated by mp_call_prepare_args_n_kw_var
                // (The latter is implicitly freed when using pystack due to its LIFO nature.)
                // The sizeof in the following statement does not include the size of the variable
                // part of the struct.  This arg is anyway not used if pystack is enabled.
                mp_nonlocal_free(code_state, sizeof(mp_code_state_t));
                #endif
                code_state = new_code_state;
                size_t n_state = code_state->n_state;
                fastn = &code_state->state[n_state - 1];
                exc_stack = (mp_exc_stack_t*)(code_state->state + n_state);
                // variables that are visible to the exception handler (declared volatile)
                exc_sp = MP_CODE_STATE_EXC_SP_IDX_TO_PTR(exc_stack, code_state->exc_sp_idx); // stack grows up, exc_sp points to top of stack
                goto unwind_loop;
            #endif

            } else {
                // propagate exception to higher level
                // Note: ip and sp don't have usable values at this point
                code_state->state[0] = MP_OBJ_FROM_PTR(nlr.ret_val); // put exception here because sp is invalid
                FRAME_LEAVE();
                return MP_VM_RETURN_EXCEPTION;
            }
        }
    }
}