emitnative.c 119 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
7277827792780278127822783278427852786278727882789279027912792279327942795279627972798279928002801280228032804280528062807280828092810281128122813281428152816281728182819282028212822282328242825282628272828282928302831283228332834283528362837283828392840284128422843284428452846284728482849285028512852285328542855285628572858285928602861286228632864286528662867286828692870287128722873287428752876287728782879288028812882288328842885288628872888288928902891289228932894
  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013, 2014 Damien P. George
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a copy
  9. * of this software and associated documentation files (the "Software"), to deal
  10. * in the Software without restriction, including without limitation the rights
  11. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  12. * copies of the Software, and to permit persons to whom the Software is
  13. * furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice shall be included in
  16. * all copies or substantial portions of the Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  20. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  21. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  22. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  23. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  24. * THE SOFTWARE.
  25. */
  26. // Essentially normal Python has 1 type: Python objects
  27. // Viper has more than 1 type, and is just a more complicated (a superset of) Python.
  28. // If you declare everything in Viper as a Python object (ie omit type decls) then
  29. // it should in principle be exactly the same as Python native.
  30. // Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
  31. // In practice we won't have a VM but rather do this in asm which is actually very minimal.
  32. // Because it breaks strict Python equivalence it should be a completely separate
  33. // decorator. It breaks equivalence because overflow on integers wraps around.
  34. // It shouldn't break equivalence if you don't use the new types, but since the
  35. // type decls might be used in normal Python for other reasons, it's probably safest,
  36. // cleanest and clearest to make it a separate decorator.
  37. // Actually, it does break equivalence because integers default to native integers,
  38. // not Python objects.
  39. // for x in l[0:8]: can be compiled into a native loop if l has pointer type
  40. #include <stdio.h>
  41. #include <string.h>
  42. #include <assert.h>
  43. #include "py/emit.h"
  44. #include "py/nativeglue.h"
  45. #include "py/objstr.h"
// Debug output goes through DEBUG_printf: the real printer when verbose
// debugging is enabled, a no-op otherwise.
#if MICROPY_DEBUG_VERBOSE // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf DEBUG_printf
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif
// wrapper around everything in this file
#if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN

// C stack layout for native functions:
//  0:                          nlr_buf_t [optional]
//  emit->code_state_start:     mp_code_state_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for native generator functions:
//  0=emit->stack_start:        nlr_buf_t
//
// Then REG_GENERATOR_STATE points to:
//  0=emit->code_state_start:   mp_code_state_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for viper functions:
//  0:                          nlr_buf_t [optional]
//  emit->code_state_start:     fun_obj, old_globals [optional]
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//                              (L0-L2 may be in regs instead)

// Native emitter needs to know the following sizes and offsets of C structs
// (on the target).  All sizes/offsets below are in machine words, not bytes.
#if MICROPY_DYNAMIC_COMPILER
#define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK enabled
#else
#define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
#endif

#define SIZEOF_CODE_STATE (sizeof(mp_code_state_t) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_t, state) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_t, ip) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_t, sp) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_GLOBALS (offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_CONST_TABLE (offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t))

// If not already defined, set parent args to same as child call registers
#ifndef REG_PARENT_RET
#define REG_PARENT_RET REG_RET
#define REG_PARENT_ARG_1 REG_ARG_1
#define REG_PARENT_ARG_2 REG_ARG_2
#define REG_PARENT_ARG_3 REG_ARG_3
#define REG_PARENT_ARG_4 REG_ARG_4
#endif

// Word index of nlr_buf_t.ret_val
#define NLR_BUF_IDX_RET_VAL (1)

// Whether the viper function needs access to fun_obj
#define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
    || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))

// Whether the native/viper function needs to be wrapped in an exception handler
#define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
    || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))

// Whether registers can be used to store locals (only true if there are no
// exception handlers, because otherwise an nlr_jump will restore registers to
// their state at the start of the function and updates to locals will be lost)
#define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))

// Indices within the local C stack for various variables
#define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
#define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
#define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (NLR_BUF_IDX_LOCAL_2)
#define LOCAL_IDX_RET_VAL(emit) (NLR_BUF_IDX_LOCAL_3)
#define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
// NOTE(review): OLD_GLOBALS and GEN_PC deliberately share the code-state ip
// slot — presumably the viper and generator uses are mutually exclusive.
#define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
// Locals are stored in reverse order at the end of the state area (L0 at end).
#define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))

// Register holding the pointer to the generator's code state.
#define REG_GENERATOR_STATE (REG_LOCAL_3)

// Record a ViperTypeError in the slot shared with the compiler.
#define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
        *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
    } while (0)
// How a value on the emitter's emulated Python stack is currently represented.
typedef enum {
    STACK_VALUE, // value lives in its state slot (not cached elsewhere)
    STACK_REG,   // value is held in a register (see stack_info_t.data.u_reg)
    STACK_IMM,   // value is a known immediate (see stack_info_t.data.u_imm)
} stack_info_kind_t;
// these enums must be distinct and the bottom 4 bits
// must correspond to the correct MP_NATIVE_TYPE_xxx value
typedef enum {
    VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
    VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
    VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
    VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
    VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
    VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,

    // These use distinct upper bits to stay unique while keeping the
    // required MP_NATIVE_TYPE_xxx value in the low 4 bits.
    VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
    VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
} vtype_kind_t;
  141. STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
  142. switch (vtype) {
  143. case VTYPE_PYOBJ: return MP_QSTR_object;
  144. case VTYPE_BOOL: return MP_QSTR_bool;
  145. case VTYPE_INT: return MP_QSTR_int;
  146. case VTYPE_UINT: return MP_QSTR_uint;
  147. case VTYPE_PTR: return MP_QSTR_ptr;
  148. case VTYPE_PTR8: return MP_QSTR_ptr8;
  149. case VTYPE_PTR16: return MP_QSTR_ptr16;
  150. case VTYPE_PTR32: return MP_QSTR_ptr32;
  151. case VTYPE_PTR_NONE: default: return MP_QSTR_None;
  152. }
  153. }
// Descriptor for one entry of the emulated Python value stack.
typedef struct _stack_info_t {
    vtype_kind_t vtype; // viper type of the value
    stack_info_kind_t kind; // how the value is currently stored
    union {
        int u_reg; // register number, valid when kind == STACK_REG
        mp_int_t u_imm; // immediate value, valid when kind == STACK_IMM
    } data;
} stack_info_t;
// Sentinel unwind labels; must fit in the 15-bit unwind_label bit-field.
#define UNWIND_LABEL_UNUSED (0x7fff)
#define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)

// One entry of the compile-time exception-handler stack.
typedef struct _exc_stack_entry_t {
    uint16_t label : 15; // label of the handler
    uint16_t is_finally : 1; // whether this entry is a finally handler
    uint16_t unwind_label : 15; // target when unwinding, or a sentinel above
    uint16_t is_active : 1; // whether the handler is currently active
} exc_stack_entry_t;
// State of the native emitter, persisting across compiler passes.
struct _emit_t {
    mp_obj_t *error_slot; // where a ViperTypeError is stored for the compiler
    uint *label_slot; // base of a group of labels reserved for internal use
    uint exit_label;
    int pass; // current pass (MP_PASS_xxx)
    bool do_viper_types; // true when scope->emit_options == MP_EMIT_OPT_VIPER

    mp_uint_t local_vtype_alloc; // allocated length of local_vtype
    vtype_kind_t *local_vtype; // viper type of each local variable

    mp_uint_t stack_info_alloc; // allocated length of stack_info
    stack_info_t *stack_info; // representation of each Python stack entry
    vtype_kind_t saved_stack_vtype;

    size_t exc_stack_alloc; // allocated length of exc_stack
    size_t exc_stack_size; // current depth of the exception-handler stack
    exc_stack_entry_t *exc_stack;

    int prelude_offset;
    int start_offset;
    int n_state; // state words: num_locals + stack_size of the scope
    uint16_t code_state_start; // word index of the code state in the C stack
    uint16_t stack_start; // word index where Python stack/locals begin
    int stack_size; // current Python stack depth
    uint16_t n_cell;

    uint16_t const_table_cur_obj; // next free object slot in const_table
    uint16_t const_table_num_obj;
    uint16_t const_table_cur_raw_code; // next free raw-code slot in const_table
    mp_uint_t *const_table;

    #if MICROPY_PERSISTENT_CODE_SAVE
    uint16_t qstr_link_cur; // number of qstr fixups recorded so far
    mp_qstr_link_entry_t *qstr_link;
    #endif

    bool last_emit_was_return_value;

    scope_t *scope; // scope currently being compiled

    ASM_T *as; // architecture-specific assembler state
};
// Registers available to hold the first REG_LOCAL_NUM local variables (L0..).
STATIC const uint8_t reg_local_table[REG_LOCAL_NUM] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};

// Forward declarations.
STATIC void emit_native_global_exc_entry(emit_t *emit);
STATIC void emit_native_global_exc_exit(emit_t *emit);
STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
  207. emit_t *EXPORT_FUN(new)(mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels) {
  208. emit_t *emit = m_new0(emit_t, 1);
  209. emit->error_slot = error_slot;
  210. emit->label_slot = label_slot;
  211. emit->stack_info_alloc = 8;
  212. emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
  213. emit->exc_stack_alloc = 8;
  214. emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
  215. emit->as = m_new0(ASM_T, 1);
  216. mp_asm_base_init(&emit->as->base, max_num_labels);
  217. return emit;
  218. }
  219. void EXPORT_FUN(free)(emit_t *emit) {
  220. mp_asm_base_deinit(&emit->as->base, false);
  221. m_del_obj(ASM_T, emit->as);
  222. m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
  223. m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
  224. m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
  225. m_del_obj(emit_t, emit);
  226. }
  227. STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);
  228. STATIC void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
  229. ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
  230. }
  231. STATIC void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
  232. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  233. ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
  234. } else {
  235. ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
  236. }
  237. }
  238. STATIC void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
  239. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  240. ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
  241. } else {
  242. ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
  243. }
  244. }
  245. STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
  246. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  247. ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
  248. ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
  249. } else {
  250. ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
  251. }
  252. }
// Move the value of qstr qst into arg_reg.  When saving persistent code the
// final qstr value isn't known at emit time, so a fixup is recorded in the
// qstr link table (the low bits of .off appear to tag the fixup kind:
// 1 for this 16-bit fix, 2 for the full-word fix — see
// emit_native_mov_reg_qstr_obj).
STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
    #if MICROPY_PERSISTENT_CODE_SAVE
    size_t loc = ASM_MOV_REG_IMM_FIX_U16(emit->as, arg_reg, qst);
    // qstr_link_cur advances on every pass (not just MP_PASS_EMIT),
    // presumably so the link table can be sized before the emit pass.
    size_t link_idx = emit->qstr_link_cur++;
    if (emit->pass == MP_PASS_EMIT) {
        emit->qstr_link[link_idx].off = loc << 2 | 1;
        emit->qstr_link[link_idx].qst = qst;
    }
    #else
    ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
    #endif
}
// Move the object form of qstr qst (MP_OBJ_NEW_QSTR) into reg_dest.  As with
// emit_native_mov_reg_qstr, persistent-code saving records a fixup instead
// (tag 2 in the low bits of .off marks this full-word fix).
STATIC void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
    #if MICROPY_PERSISTENT_CODE_SAVE
    size_t loc = ASM_MOV_REG_IMM_FIX_WORD(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
    // Advance on every pass so link-table indices are stable across passes.
    size_t link_idx = emit->qstr_link_cur++;
    if (emit->pass == MP_PASS_EMIT) {
        emit->qstr_link[link_idx].off = loc << 2 | 2;
        emit->qstr_link[link_idx].qst = qst;
    }
    #else
    ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
    #endif
}
// Store immediate imm into state slot local_num, routed through scratch
// register reg_temp.
#define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
    do { \
        ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
        emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
    } while (false)
  282. STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
  283. DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
  284. emit->pass = pass;
  285. emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
  286. emit->stack_size = 0;
  287. #if N_PRELUDE_AS_BYTES_OBJ
  288. emit->const_table_cur_obj = emit->do_viper_types ? 0 : 1; // reserve first obj for prelude bytes obj
  289. #else
  290. emit->const_table_cur_obj = 0;
  291. #endif
  292. emit->const_table_cur_raw_code = 0;
  293. #if MICROPY_PERSISTENT_CODE_SAVE
  294. emit->qstr_link_cur = 0;
  295. #endif
  296. emit->last_emit_was_return_value = false;
  297. emit->scope = scope;
  298. // allocate memory for keeping track of the types of locals
  299. if (emit->local_vtype_alloc < scope->num_locals) {
  300. emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
  301. emit->local_vtype_alloc = scope->num_locals;
  302. }
  303. // set default type for arguments
  304. mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
  305. if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
  306. num_args += 1;
  307. }
  308. if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
  309. num_args += 1;
  310. }
  311. for (mp_uint_t i = 0; i < num_args; i++) {
  312. emit->local_vtype[i] = VTYPE_PYOBJ;
  313. }
  314. // Set viper type for arguments
  315. if (emit->do_viper_types) {
  316. for (int i = 0; i < emit->scope->id_info_len; ++i) {
  317. id_info_t *id = &emit->scope->id_info[i];
  318. if (id->flags & ID_FLAG_IS_PARAM) {
  319. assert(id->local_num < emit->local_vtype_alloc);
  320. emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
  321. }
  322. }
  323. }
  324. // local variables begin unbound, and have unknown type
  325. for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
  326. emit->local_vtype[i] = VTYPE_UNBOUND;
  327. }
  328. // values on stack begin unbound
  329. for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
  330. emit->stack_info[i].kind = STACK_VALUE;
  331. emit->stack_info[i].vtype = VTYPE_UNBOUND;
  332. }
  333. mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
  334. // generate code for entry to function
  335. // Work out start of code state (mp_code_state_t or reduced version for viper)
  336. emit->code_state_start = 0;
  337. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  338. emit->code_state_start = SIZEOF_NLR_BUF;
  339. }
  340. if (emit->do_viper_types) {
  341. // Work out size of state (locals plus stack)
  342. // n_state counts all stack and locals, even those in registers
  343. emit->n_state = scope->num_locals + scope->stack_size;
  344. int num_locals_in_regs = 0;
  345. if (CAN_USE_REGS_FOR_LOCALS(emit)) {
  346. num_locals_in_regs = scope->num_locals;
  347. if (num_locals_in_regs > REG_LOCAL_NUM) {
  348. num_locals_in_regs = REG_LOCAL_NUM;
  349. }
  350. // Need a spot for REG_LOCAL_3 if 4 or more args (see below)
  351. if (scope->num_pos_args >= 4) {
  352. --num_locals_in_regs;
  353. }
  354. }
  355. // Work out where the locals and Python stack start within the C stack
  356. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  357. // Reserve 2 words for function object and old globals
  358. emit->stack_start = emit->code_state_start + 2;
  359. } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
  360. // Reserve 1 word for function object, to access const table
  361. emit->stack_start = emit->code_state_start + 1;
  362. } else {
  363. emit->stack_start = emit->code_state_start + 0;
  364. }
  365. // Entry to function
  366. ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);
  367. #if N_X86
  368. asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
  369. #endif
  370. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  371. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
  372. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, 0);
  373. // Store function object (passed as first arg) to stack if needed
  374. if (NEED_FUN_OBJ(emit)) {
  375. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
  376. }
  377. // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_3
  378. #if N_X86
  379. asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
  380. asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
  381. asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_3);
  382. #else
  383. ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
  384. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
  385. ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_4);
  386. #endif
  387. // Check number of args matches this function, and call mp_arg_check_num_sig if not
  388. ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
  389. ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
  390. ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
  391. mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
  392. ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
  393. ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
  394. mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);
  395. // Store arguments into locals (reg or stack), converting to native if needed
  396. for (int i = 0; i < emit->scope->num_pos_args; i++) {
  397. int r = REG_ARG_1;
  398. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_3, i);
  399. if (emit->local_vtype[i] != VTYPE_PYOBJ) {
  400. emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
  401. r = REG_RET;
  402. }
  403. // REG_LOCAL_3 points to the args array so be sure not to overwrite it if it's still needed
  404. if (i < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit) && (i != 2 || emit->scope->num_pos_args == 3)) {
  405. ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
  406. } else {
  407. emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
  408. }
  409. }
  410. // Get 3rd local from the stack back into REG_LOCAL_3 if this reg couldn't be written to above
  411. if (emit->scope->num_pos_args >= 4 && CAN_USE_REGS_FOR_LOCALS(emit)) {
  412. ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_3, LOCAL_IDX_LOCAL_VAR(emit, 2));
  413. }
  414. emit_native_global_exc_entry(emit);
  415. } else {
  416. // work out size of state (locals plus stack)
  417. emit->n_state = scope->num_locals + scope->stack_size;
  418. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  419. emit->code_state_start = 0;
  420. emit->stack_start = SIZEOF_CODE_STATE;
  421. #if N_PRELUDE_AS_BYTES_OBJ
  422. // Load index of prelude bytes object in const_table
  423. mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)(emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1));
  424. #else
  425. mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_offset);
  426. #endif
  427. mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
  428. ASM_ENTRY(emit->as, SIZEOF_NLR_BUF);
  429. // Put address of code_state into REG_GENERATOR_STATE
  430. #if N_X86
  431. asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
  432. #else
  433. ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
  434. #endif
  435. // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from
  436. #if N_X86
  437. asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
  438. #endif
  439. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_PARENT_ARG_2);
  440. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  441. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
  442. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
  443. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
  444. } else {
  445. // The locals and stack start after the code_state structure
  446. emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
  447. // Allocate space on C-stack for code_state structure, which includes state
  448. ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
  449. // Prepare incoming arguments for call to mp_setup_code_state
  450. #if N_X86
  451. asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
  452. asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
  453. asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
  454. asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
  455. #endif
  456. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  457. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
  458. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
  459. // Set code_state.fun_bc
  460. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
  461. // Set code_state.ip (offset from start of this function to prelude info)
  462. #if N_PRELUDE_AS_BYTES_OBJ
  463. // Prelude is a bytes object in const_table; store ip = prelude->data - fun_bc->bytecode
  464. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1);
  465. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, offsetof(mp_obj_str_t, data) / sizeof(uintptr_t));
  466. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_BYTECODE);
  467. ASM_SUB_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1);
  468. emit_native_mov_state_reg(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, REG_LOCAL_3);
  469. #else
  470. // TODO this encoding may change size in the final pass, need to make it fixed
  471. emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, emit->prelude_offset, REG_PARENT_ARG_1);
  472. #endif
  473. // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
  474. emit_native_mov_state_imm_via(emit, emit->code_state_start + offsetof(mp_code_state_t, n_state) / sizeof(uintptr_t), emit->n_state, REG_ARG_1);
  475. // Put address of code_state into first arg
  476. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
  477. // Copy next 3 args if needed
  478. #if REG_ARG_2 != REG_PARENT_ARG_2
  479. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
  480. #endif
  481. #if REG_ARG_3 != REG_PARENT_ARG_3
  482. ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
  483. #endif
  484. #if REG_ARG_4 != REG_PARENT_ARG_4
  485. ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
  486. #endif
  487. // Call mp_setup_code_state to prepare code_state structure
  488. #if N_THUMB
  489. asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
  490. #elif N_ARM
  491. asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
  492. #else
  493. ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
  494. #endif
  495. }
  496. emit_native_global_exc_entry(emit);
  497. // cache some locals in registers, but only if no exception handlers
  498. if (CAN_USE_REGS_FOR_LOCALS(emit)) {
  499. for (int i = 0; i < REG_LOCAL_NUM && i < scope->num_locals; ++i) {
  500. ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
  501. }
  502. }
  503. // set the type of closed over variables
  504. for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
  505. id_info_t *id = &scope->id_info[i];
  506. if (id->kind == ID_INFO_KIND_CELL) {
  507. emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
  508. }
  509. }
  510. if (pass == MP_PASS_EMIT) {
  511. // write argument names as qstr objects
  512. // see comment in corresponding part of emitbc.c about the logic here
  513. for (int i = 0; i < scope->num_pos_args + scope->num_kwonly_args; i++) {
  514. qstr qst = MP_QSTR__star_;
  515. for (int j = 0; j < scope->id_info_len; ++j) {
  516. id_info_t *id = &scope->id_info[j];
  517. if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
  518. qst = id->qst;
  519. break;
  520. }
  521. }
  522. emit->const_table[i] = (mp_uint_t)MP_OBJ_NEW_QSTR(qst);
  523. }
  524. }
  525. }
  526. }
  527. static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
  528. mp_asm_base_data(&emit->as->base, 1, val);
  529. }
// Finish the current emitter pass: for non-viper code, append the
// bytecode-style prelude after the machine code; then end the assembler
// pass, and on the sizing/final passes allocate the const table and hand
// the generated code to the runtime via mp_emit_glue_assign_native.
STATIC void emit_native_end_pass(emit_t *emit) {
    emit_native_global_exc_exit(emit);
    if (!emit->do_viper_types) {
        // Record where the prelude starts so it can be located later
        emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
        size_t n_state = emit->n_state;
        size_t n_exc_stack = 0; // exc-stack not needed for native code
        MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);
        #if MICROPY_PERSISTENT_CODE
        size_t n_info = 4;
        #else
        size_t n_info = 1;
        #endif
        MP_BC_PRELUDE_SIZE_ENCODE(n_info, emit->n_cell, emit_native_write_code_info_byte, emit);
        #if MICROPY_PERSISTENT_CODE
        // Store simple_name and source_file qstrs, two little-endian bytes each
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file >> 8);
        #else
        mp_asm_base_data(&emit->as->base, 1, 1);
        #endif
        // bytecode prelude: initialise closed over variables
        size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
        for (int i = 0; i < emit->scope->id_info_len; i++) {
            id_info_t *id = &emit->scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                assert(id->local_num <= 255);
                mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
            }
        }
        // Remember how many cell bytes were written, for the SIZE encoding next pass
        emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;
        #if N_PRELUDE_AS_BYTES_OBJ
        // Prelude bytes object is after qstr arg names and mp_fun_table
        size_t table_off = emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1;
        if (emit->pass == MP_PASS_EMIT) {
            // Wrap the just-emitted prelude bytes in a bytes object in the const table
            void *buf = emit->as->base.code_base + emit->prelude_offset;
            size_t n = emit->as->base.code_offset - emit->prelude_offset;
            emit->const_table[table_off] = (uintptr_t)mp_obj_new_bytes(buf, n);
        }
        #endif
    }
    ASM_END_PASS(emit->as);
    // check stack is back to zero size
    assert(emit->stack_size == 0);
    assert(emit->exc_stack_size == 0);
    // Deal with const table accounting
    assert(emit->pass <= MP_PASS_STACK_SIZE || (emit->const_table_num_obj == emit->const_table_cur_obj));
    emit->const_table_num_obj = emit->const_table_cur_obj;
    if (emit->pass == MP_PASS_CODE_SIZE) {
        // Const table layout: [qstr arg names,] mp_fun_table, objects, raw codes
        size_t const_table_alloc = 1 + emit->const_table_num_obj + emit->const_table_cur_raw_code;
        size_t nqstr = 0;
        if (!emit->do_viper_types) {
            // Add room for qstr names of arguments
            nqstr = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
            const_table_alloc += nqstr;
        }
        emit->const_table = m_new(mp_uint_t, const_table_alloc);
        #if !MICROPY_DYNAMIC_COMPILER
        // Store mp_fun_table pointer just after qstrs
        // (but in dynamic-compiler mode eliminate dependency on mp_fun_table)
        emit->const_table[nqstr] = (mp_uint_t)(uintptr_t)&mp_fun_table;
        #endif
        #if MICROPY_PERSISTENT_CODE_SAVE
        size_t qstr_link_alloc = emit->qstr_link_cur;
        if (qstr_link_alloc > 0) {
            emit->qstr_link = m_new(mp_qstr_link_entry_t, qstr_link_alloc);
        }
        #endif
    }
    if (emit->pass == MP_PASS_EMIT) {
        // Final pass: register the finished machine code with the runtime
        void *f = mp_asm_base_get_code(&emit->as->base);
        mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
        mp_emit_glue_assign_native(emit->scope->raw_code,
            emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
            f, f_len, emit->const_table,
            #if MICROPY_PERSISTENT_CODE_SAVE
            emit->prelude_offset,
            emit->const_table_cur_obj, emit->const_table_cur_raw_code,
            emit->qstr_link_cur, emit->qstr_link,
            #endif
            emit->scope->num_pos_args, emit->scope->scope_flags, 0);
    }
}
  613. STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
  614. return emit->last_emit_was_return_value;
  615. }
  616. STATIC void ensure_extra_stack(emit_t *emit, size_t delta) {
  617. if (emit->stack_size + delta > emit->stack_info_alloc) {
  618. size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
  619. emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
  620. emit->stack_info_alloc = new_alloc;
  621. }
  622. }
  623. STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
  624. assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
  625. assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
  626. emit->stack_size += stack_size_delta;
  627. if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
  628. emit->scope->stack_size = emit->stack_size;
  629. }
  630. #ifdef DEBUG_PRINT
  631. DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
  632. for (int i = 0; i < emit->stack_size; i++) {
  633. stack_info_t *si = &emit->stack_info[i];
  634. DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
  635. }
  636. DEBUG_printf("\n");
  637. #endif
  638. }
  639. STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
  640. DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
  641. if (delta > 0) {
  642. ensure_extra_stack(emit, delta);
  643. }
  644. // If we are adjusting the stack in a positive direction (pushing) then we
  645. // need to fill in values for the stack kind and vtype of the newly-pushed
  646. // entries. These should be set to "value" (ie not reg or imm) because we
  647. // should only need to adjust the stack due to a jump to this part in the
  648. // code (and hence we have settled the stack before the jump).
  649. for (mp_int_t i = 0; i < delta; i++) {
  650. stack_info_t *si = &emit->stack_info[emit->stack_size + i];
  651. si->kind = STACK_VALUE;
  652. // TODO we don't know the vtype to use here. At the moment this is a
  653. // hack to get the case of multi comparison working.
  654. if (delta == 1) {
  655. si->vtype = emit->saved_stack_vtype;
  656. } else {
  657. si->vtype = VTYPE_PYOBJ;
  658. }
  659. }
  660. adjust_stack(emit, delta);
  661. }
  662. STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
  663. (void)emit;
  664. (void)source_line;
  665. }
  666. // this must be called at start of emit functions
  667. STATIC void emit_native_pre(emit_t *emit) {
  668. emit->last_emit_was_return_value = false;
  669. }
  670. // depth==0 is top, depth==1 is before top, etc
  671. STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
  672. return &emit->stack_info[emit->stack_size - 1 - depth];
  673. }
  674. // depth==0 is top, depth==1 is before top, etc
  675. STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
  676. if (emit->do_viper_types) {
  677. return peek_stack(emit, depth)->vtype;
  678. } else {
  679. // Type is always PYOBJ even if the intermediate stored value is not
  680. return VTYPE_PYOBJ;
  681. }
  682. }
  683. // pos=1 is TOS, pos=2 is next, etc
  684. // use pos=0 for no skipping
  685. STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
  686. skip_stack_pos = emit->stack_size - skip_stack_pos;
  687. for (int i = 0; i < emit->stack_size; i++) {
  688. if (i != skip_stack_pos) {
  689. stack_info_t *si = &emit->stack_info[i];
  690. if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
  691. si->kind = STACK_VALUE;
  692. emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
  693. }
  694. }
  695. }
  696. }
  697. STATIC void need_reg_all(emit_t *emit) {
  698. for (int i = 0; i < emit->stack_size; i++) {
  699. stack_info_t *si = &emit->stack_info[i];
  700. if (si->kind == STACK_REG) {
  701. si->kind = STACK_VALUE;
  702. emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
  703. }
  704. }
  705. }
  706. STATIC vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
  707. if (!convert_to_pyobj && emit->do_viper_types) {
  708. ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
  709. return si->vtype;
  710. } else {
  711. if (si->vtype == VTYPE_PYOBJ) {
  712. ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
  713. } else if (si->vtype == VTYPE_BOOL) {
  714. emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
  715. } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
  716. ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
  717. } else if (si->vtype == VTYPE_PTR_NONE) {
  718. emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
  719. } else {
  720. mp_raise_NotImplementedError("conversion to object");
  721. }
  722. return VTYPE_PYOBJ;
  723. }
  724. }
  725. STATIC void need_stack_settled(emit_t *emit) {
  726. DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
  727. for (int i = 0; i < emit->stack_size; i++) {
  728. stack_info_t *si = &emit->stack_info[i];
  729. if (si->kind == STACK_REG) {
  730. DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
  731. si->kind = STACK_VALUE;
  732. emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
  733. }
  734. }
  735. for (int i = 0; i < emit->stack_size; i++) {
  736. stack_info_t *si = &emit->stack_info[i];
  737. if (si->kind == STACK_IMM) {
  738. DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
  739. si->kind = STACK_VALUE;
  740. si->vtype = load_reg_stack_imm(emit, REG_TEMP0, si, false);
  741. emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP0);
  742. }
  743. }
  744. }
  745. // pos=1 is TOS, pos=2 is next, etc
  746. STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
  747. need_reg_single(emit, reg_dest, pos);
  748. stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
  749. *vtype = si->vtype;
  750. switch (si->kind) {
  751. case STACK_VALUE:
  752. emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
  753. break;
  754. case STACK_REG:
  755. if (si->data.u_reg != reg_dest) {
  756. ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
  757. }
  758. break;
  759. case STACK_IMM:
  760. *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
  761. break;
  762. }
  763. }
  764. // does an efficient X=pop(); discard(); push(X)
  765. // needs a (non-temp) register in case the poped element was stored in the stack
  766. STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
  767. stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
  768. si[0] = si[1];
  769. if (si->kind == STACK_VALUE) {
  770. // if folded element was on the stack we need to put it in a register
  771. emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
  772. si->kind = STACK_REG;
  773. si->data.u_reg = reg_dest;
  774. }
  775. adjust_stack(emit, -1);
  776. }
  777. // If stacked value is in a register and the register is not r1 or r2, then
  778. // *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
  779. STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
  780. emit->last_emit_was_return_value = false;
  781. stack_info_t *si = peek_stack(emit, 0);
  782. if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
  783. *vtype = si->vtype;
  784. *reg_dest = si->data.u_reg;
  785. need_reg_single(emit, *reg_dest, 1);
  786. } else {
  787. emit_access_stack(emit, 1, vtype, *reg_dest);
  788. }
  789. adjust_stack(emit, -1);
  790. }
  791. STATIC void emit_pre_pop_discard(emit_t *emit) {
  792. emit->last_emit_was_return_value = false;
  793. adjust_stack(emit, -1);
  794. }
  795. STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
  796. emit->last_emit_was_return_value = false;
  797. emit_access_stack(emit, 1, vtype, reg_dest);
  798. adjust_stack(emit, -1);
  799. }
  800. STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
  801. emit_pre_pop_reg(emit, vtypea, rega);
  802. emit_pre_pop_reg(emit, vtypeb, regb);
  803. }
  804. STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
  805. emit_pre_pop_reg(emit, vtypea, rega);
  806. emit_pre_pop_reg(emit, vtypeb, regb);
  807. emit_pre_pop_reg(emit, vtypec, regc);
  808. }
  809. STATIC void emit_post(emit_t *emit) {
  810. (void)emit;
  811. }
  812. STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
  813. stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
  814. si->vtype = new_vtype;
  815. }
  816. STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
  817. ensure_extra_stack(emit, 1);
  818. stack_info_t *si = &emit->stack_info[emit->stack_size];
  819. si->vtype = vtype;
  820. si->kind = STACK_REG;
  821. si->data.u_reg = reg;
  822. adjust_stack(emit, 1);
  823. }
  824. STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
  825. ensure_extra_stack(emit, 1);
  826. stack_info_t *si = &emit->stack_info[emit->stack_size];
  827. si->vtype = vtype;
  828. si->kind = STACK_IMM;
  829. si->data.u_imm = imm;
  830. adjust_stack(emit, 1);
  831. }
  832. STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
  833. emit_post_push_reg(emit, vtypea, rega);
  834. emit_post_push_reg(emit, vtypeb, regb);
  835. }
  836. STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
  837. emit_post_push_reg(emit, vtypea, rega);
  838. emit_post_push_reg(emit, vtypeb, regb);
  839. emit_post_push_reg(emit, vtypec, regc);
  840. }
  841. STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
  842. emit_post_push_reg(emit, vtypea, rega);
  843. emit_post_push_reg(emit, vtypeb, regb);
  844. emit_post_push_reg(emit, vtypec, regc);
  845. emit_post_push_reg(emit, vtyped, regd);
  846. }
  847. STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
  848. need_reg_all(emit);
  849. ASM_CALL_IND(emit->as, fun_kind);
  850. }
  851. STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
  852. need_reg_all(emit);
  853. ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
  854. ASM_CALL_IND(emit->as, fun_kind);
  855. }
  856. STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
  857. need_reg_all(emit);
  858. ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
  859. ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
  860. ASM_CALL_IND(emit->as, fun_kind);
  861. }
  862. STATIC void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
  863. need_reg_all(emit);
  864. emit_native_mov_reg_qstr(emit, arg_reg, qst);
  865. ASM_CALL_IND(emit->as, fun_kind);
  866. }
// vtype of all n_pop objects is VTYPE_PYOBJ
// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
    need_reg_all(emit);
    // First, store any immediate values to their respective place on the stack.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
            emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
        }
        // verify that this value is on the stack
        assert(si->kind == STACK_VALUE);
    }
    // Second, convert any non-VTYPE_PYOBJ to that type.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        if (si->vtype != VTYPE_PYOBJ) {
            // Convert in place: load raw value, call the converter, store the object back
            mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
            emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
            emit_native_mov_state_reg(emit, local_num, REG_RET);
            si->vtype = VTYPE_PYOBJ;
            DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
        }
    }
    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
    adjust_stack(emit, -n_pop);
    emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
}
  902. // vtype of all n_push objects is VTYPE_PYOBJ
  903. STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
  904. need_reg_all(emit);
  905. ensure_extra_stack(emit, n_push);
  906. for (mp_uint_t i = 0; i < n_push; i++) {
  907. emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
  908. emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
  909. }
  910. emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
  911. adjust_stack(emit, n_push);
  912. }
  913. STATIC void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
  914. if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
  915. size_t new_alloc = emit->exc_stack_alloc + 4;
  916. emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
  917. emit->exc_stack_alloc = new_alloc;
  918. }
  919. exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
  920. e->label = label;
  921. e->is_finally = is_finally;
  922. e->unwind_label = UNWIND_LABEL_UNUSED;
  923. e->is_active = true;
  924. ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
  925. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
  926. }
// Deactivate the innermost exception handler and emit code to make the next
// innermost active handler (if any) current, by reloading the runtime
// handler-PC slot. start_of_handler means we are at the beginning of a
// handler, where the PC slot is already zero and need not be cleared again.
STATIC void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
    assert(emit->exc_stack_size > 0);
    // Get current exception handler and deactivate it
    exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
    e->is_active = false;
    // Find next innermost active exception handler, to restore as current handler
    // NOTE(review): when no handler remains active, e ends one entry before the
    // array base; forming/comparing that pointer is outside the one-past-the-end
    // range C strictly guarantees — long-standing pattern, but worth confirming.
    for (--e; e >= emit->exc_stack && !e->is_active; --e) {
    }
    // Update the PC of the new exception handler
    if (e < emit->exc_stack) {
        // No active handler, clear handler PC to zero
        if (start_of_handler) {
            // Optimisation: PC is already cleared by global exc handler
            return;
        }
        ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
    } else {
        // Found new active handler, get its PC
        ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
    }
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
}
  949. STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
  950. assert(emit->exc_stack_size > 0);
  951. exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
  952. assert(e->is_active == false);
  953. return e;
  954. }
  955. STATIC void emit_load_reg_with_ptr(emit_t *emit, int reg, mp_uint_t ptr, size_t table_off) {
  956. if (!emit->do_viper_types) {
  957. // Skip qstr names of arguments
  958. table_off += emit->scope->num_pos_args + emit->scope->num_kwonly_args;
  959. }
  960. if (emit->pass == MP_PASS_EMIT) {
  961. emit->const_table[table_off] = ptr;
  962. }
  963. emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
  964. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
  965. ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
  966. }
  967. STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
  968. // First entry is for mp_fun_table
  969. size_t table_off = 1 + emit->const_table_cur_obj++;
  970. emit_load_reg_with_ptr(emit, reg, (mp_uint_t)obj, table_off);
  971. }
  972. STATIC void emit_load_reg_with_raw_code(emit_t *emit, int reg, mp_raw_code_t *rc) {
  973. // First entry is for mp_fun_table, then constant objects
  974. size_t table_off = 1 + emit->const_table_num_obj + emit->const_table_cur_raw_code++;
  975. emit_load_reg_with_ptr(emit, reg, (mp_uint_t)rc, table_off);
  976. }
// Bind label l to the current code position. If l marks the start of a
// finally handler, first spill TOS (the exception value) into the exception
// slot, and afterwards pop the handler off the exception stack.
STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
    DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
    // A finally label is recognised by matching the top exc-stack entry
    bool is_finally = false;
    if (emit->exc_stack_size > 0) {
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        is_finally = e->is_finally && e->label == l;
    }
    if (is_finally) {
        // Label is at start of finally handler: store TOS into exception slot
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
    }
    emit_native_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
    mp_asm_base_label_assign(&emit->as->base, l);
    emit_post(emit);
    if (is_finally) {
        // Label is at start of finally handler: pop exception stack
        emit_native_leave_exc_stack(emit, false);
    }
}
// Emit the function-entry code for the global exception handler: swap in
// the function's globals, push an NLR context (directly, or via setjmp on
// N_NLR_SETJMP targets), and lay out the global-except/start labels that
// the rest of the function jumps to.
STATIC void emit_native_global_exc_entry(emit_t *emit) {
    // Note: 4 labels are reserved for this function, starting at *emit->label_slot
    emit->exit_label = *emit->label_slot;
    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        mp_uint_t nlr_label = *emit->label_slot + 1;
        mp_uint_t start_label = *emit->label_slot + 2;
        mp_uint_t global_except_label = *emit->label_slot + 3;
        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            // Set new globals
            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_GLOBALS);
            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
            // Save old globals (or NULL if globals didn't change)
            emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
        }
        if (emit->scope->exc_stack_size == 0) {
            // Simple case: no try/except in this function
            if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
                // Optimisation: if globals didn't change don't push the nlr context
                ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
            }
            // Wrap everything in an nlr context
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
            emit_call(emit, MP_F_NLR_PUSH);
            #if N_NLR_SETJMP
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
            emit_call(emit, MP_F_SETJMP);
            #endif
            // Zero return from nlr_push/setjmp: normal path, jump to function body
            ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
        } else {
            // Function has exception handlers: full nlr/unwind machinery
            // Clear the unwind state
            ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
            // Put PC of start code block into REG_LOCAL_1
            ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
            // Wrap everything in an nlr context
            emit_native_label_assign(emit, nlr_label);
            // Preserve unwind state across the nlr_push/setjmp call
            ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_2, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
            emit_call(emit, MP_F_NLR_PUSH);
            #if N_NLR_SETJMP
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
            emit_call(emit, MP_F_SETJMP);
            #endif
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_LOCAL_2);
            // Non-zero return means an exception was raised
            ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
            // Clear PC of current code block, and jump there to resume execution
            ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
            ASM_JUMP_REG(emit->as, REG_LOCAL_1);
            // Global exception handler: check for valid exception handler
            emit_native_label_assign(emit, global_except_label);
            #if N_NLR_SETJMP
            // Reload REG_FUN_TABLE, since it may be clobbered by longjmp
            emit_native_mov_reg_state(emit, REG_LOCAL_1, LOCAL_IDX_FUN_OBJ(emit));
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t));
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_1, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
            #endif
            // If a local handler PC is set, re-enter the nlr wrapper to dispatch to it
            ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
            ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
        }
        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            // Restore old globals
            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
        }
        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
            // Unhandled exception in a generator: return it to the caller
            // Store return value in state[0]
            ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
            ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
            // Load return kind
            ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
            ASM_EXIT(emit->as);
        } else {
            // Re-raise exception out to caller
            ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
            emit_call(emit, MP_F_NATIVE_RAISE);
        }
        // Label for start of function
        emit_native_label_assign(emit, start_label);
        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
            // Generators resume at the PC saved in their state
            emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
            ASM_JUMP_REG(emit->as, REG_TEMP0);
            emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
            // This is the first entry of the generator
            // Check LOCAL_IDX_EXC_VAL for any injected value
            ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
            emit_call(emit, MP_F_NATIVE_RAISE);
        }
    }
}
// Emit the common function epilogue: restore the caller's globals dict,
// pop the nlr context (both only when a global exception handler was set
// up for this scope) and return the value stored in LOCAL_IDX_RET_VAL.
STATIC void emit_native_global_exc_exit(emit_t *emit) {
    // Label for end of function
    emit_native_label_assign(emit, emit->exit_label);
    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        // Get old globals
        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
            if (emit->scope->exc_stack_size == 0) {
                // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
                // (LOCAL_IDX_OLD_GLOBALS holds 0 in that case; exit_label + 1 skips both steps)
                ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
            }
            // Restore old globals
            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
        }
        // Pop the nlr context
        emit_call(emit, MP_F_NLR_POP);
        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            if (emit->scope->exc_stack_size == 0) {
                // Destination label for above optimisation
                emit_native_label_assign(emit, emit->exit_label + 1);
            }
        }
        // Load return value
        ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
    }
    ASM_EXIT(emit->as);
}
// Emit code for IMPORT_NAME: pops fromlist and level from the Python stack
// and calls mp_import_name(qst, fromlist, level), pushing the module object.
STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
    DEBUG_printf("import_name %s\n", qstr_str(qst));

    // get arguments from stack: arg2 = fromlist, arg3 = level
    // If using viper types these arguments must be converted to proper objects, and
    // to accomplish this viper types are turned off for the emit_pre_pop_reg_reg call.
    bool orig_do_viper_types = emit->do_viper_types;
    emit->do_viper_types = false;
    vtype_kind_t vtype_fromlist;
    vtype_kind_t vtype_level;
    emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
    assert(vtype_fromlist == VTYPE_PYOBJ);
    assert(vtype_level == VTYPE_PYOBJ);
    emit->do_viper_types = orig_do_viper_types;

    emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for IMPORT_FROM: peeks at the module on the top of the stack
// (it stays there for subsequent IMPORT_FROMs) and pushes the named attribute.
STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
    DEBUG_printf("import_from %s\n", qstr_str(qst));
    emit_native_pre(emit);
    vtype_kind_t vtype_module;
    emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for IMPORT_STAR: pops the module object and calls
// mp_import_all to copy its public names into the current namespace.
STATIC void emit_native_import_star(emit_t *emit) {
    DEBUG_printf("import_star\n");
    vtype_kind_t vtype_module;
    emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call(emit, MP_F_IMPORT_ALL);
    emit_post(emit);
}
  1150. STATIC void emit_native_import(emit_t *emit, qstr qst, int kind) {
  1151. if (kind == MP_EMIT_IMPORT_NAME) {
  1152. emit_native_import_name(emit, qst);
  1153. } else if (kind == MP_EMIT_IMPORT_FROM) {
  1154. emit_native_import_from(emit, qst);
  1155. } else {
  1156. emit_native_import_star(emit);
  1157. }
  1158. }
// Emit code to push a constant token (None, True, False, Ellipsis).
// None/True/False are pushed as immediates with special vtypes so viper
// code can keep them unboxed; Ellipsis is a real object.
STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
    DEBUG_printf("load_const_tok(tok=%u)\n", tok);
    if (tok == MP_TOKEN_ELLIPSIS) {
        #if MICROPY_PERSISTENT_CODE_SAVE
        // Must go via the const table so the object is relocatable when saved
        emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
        #else
        emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
        #endif
    } else {
        emit_native_pre(emit);
        if (tok == MP_TOKEN_KW_NONE) {
            emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
        } else {
            // True/False become 1/0 with VTYPE_BOOL
            emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
        }
    }
}
// Emit code to push a small integer constant as an unboxed VTYPE_INT immediate.
STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
    DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
    emit_native_pre(emit);
    emit_post_push_imm(emit, VTYPE_INT, arg);
}
// Emit code to push a string constant as an interned-string (qstr) object.
STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
    emit_native_pre(emit);
    // TODO: Eventually we want to be able to work with raw pointers in viper to
    // do native array access. For now we just load them as any other object.
    /*
    if (emit->do_viper_types) {
        // load a pointer to the asciiz string?
        emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
    } else
    */
    {
        need_reg_single(emit, REG_TEMP0, 0);
        emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
    }
}
// Emit code to push an arbitrary constant object, loading it via the
// function's const table (hence MP_SCOPE_FLAG_HASCONSTS is set).
STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
    emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
    emit_native_pre(emit);
    need_reg_single(emit, REG_RET, 0);
    emit_load_reg_with_object(emit, REG_RET, obj);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code to push MP_OBJ_NULL (value 0) as a VTYPE_PYOBJ immediate.
STATIC void emit_native_load_null(emit_t *emit) {
    emit_native_pre(emit);
    emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
}
// Emit code to push a fast (function-scope) local variable.  Low-numbered
// locals may live permanently in registers; otherwise the value is loaded
// from its slot in the state area.
STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
    DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
    vtype_kind_t vtype = emit->local_vtype[local_num];
    if (vtype == VTYPE_UNBOUND) {
        // Viper: reading a local before any store means its type is unknown
        EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "local '%q' used before type known", qst);
    }
    emit_native_pre(emit);
    if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
        // Local is held in a dedicated register
        emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
    } else {
        // Local is held in the state area; load via a temporary
        need_reg_single(emit, REG_TEMP0, 0);
        emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
        emit_post_push_reg(emit, vtype, REG_TEMP0);
    }
}
// Emit code to push a closed-over variable: load the cell object (stored
// like a fast local) then dereference it (the value is at offset 1 words
// into the cell).
STATIC void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
    DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
    need_reg_single(emit, REG_RET, 0);
    emit_native_load_fast(emit, qst, local_num);
    vtype_kind_t vtype;
    int reg_base = REG_RET;
    emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
    // closed over vars are always Python objects
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
  1234. STATIC void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
  1235. if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
  1236. emit_native_load_fast(emit, qst, local_num);
  1237. } else {
  1238. emit_native_load_deref(emit, qst, local_num);
  1239. }
  1240. }
// Emit code to push a name/global lookup.  In viper mode, names of the
// builtin casting operators (int, ptr8, ...) are pushed as compile-time
// VTYPE_BUILTIN_CAST immediates instead of doing a runtime lookup.
STATIC void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
    // The runtime function table is indexed by adding the idop kind
    MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
    MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
    emit_native_pre(emit);
    if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
        DEBUG_printf("load_name(%s)\n", qstr_str(qst));
    } else {
        DEBUG_printf("load_global(%s)\n", qstr_str(qst));
        if (emit->do_viper_types) {
            // check for builtin casting operators
            int native_type = mp_native_type_from_qstr(qst);
            if (native_type >= MP_NATIVE_TYPE_BOOL) {
                emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
                return;
            }
        }
    }
    emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for LOAD_ATTR: pops the base object and pushes base.qst.
STATIC void emit_native_load_attr(emit_t *emit, qstr qst) {
    // depends on type of subject:
    // - integer, function, pointer to integers: error
    // - pointer to structure: get member, quite easy
    // - Python object: call mp_load_attr, and needs to be typed to convert result
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for LOAD_METHOD: replaces the base (or, for super, the 3-value
// super stack layout) with the 2-slot method+self pair, written directly
// into the Python stack via a dest pointer.
STATIC void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
    if (is_super) {
        // 3 values consumed, 2 produced, through the same stack pointer
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
        emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
        emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
    } else {
        vtype_kind_t vtype_base;
        emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
        assert(vtype_base == VTYPE_PYOBJ);
        emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
        emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
    }
}
// Emit code to push the __build_class__ builtin.
STATIC void emit_native_load_build_class(emit_t *emit) {
    emit_native_pre(emit);
    emit_call(emit, MP_F_LOAD_BUILD_CLASS);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for base[index] load.  For Python objects this calls
// mp_obj_subscr with MP_OBJ_SENTINEL (= load).  For viper pointer types
// (ptr8/ptr16/ptr32) it emits a direct scaled memory load, with special
// cases for an immediate index and for Thumb's reg+imm5 load encodings.
STATIC void emit_native_load_subscr(emit_t *emit) {
    DEBUG_printf("load_subscr\n");
    // need to compile: base[index]
    // pop: index, base
    // optimise case where index is an immediate
    vtype_kind_t vtype_base = peek_vtype(emit, 1);
    if (vtype_base == VTYPE_PYOBJ) {
        // standard Python subscr
        // TODO factor this implicit cast code with other uses of it
        vtype_kind_t vtype_index = peek_vtype(emit, 0);
        if (vtype_index == VTYPE_PYOBJ) {
            emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
        } else {
            // index is a native value: box it into an object first
            emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
            ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
        }
        emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
        // MP_OBJ_SENTINEL as the value selects the "load" mode of mp_obj_subscr
        emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    } else {
        // viper load
        // TODO The different machine architectures have very different
        // capabilities and requirements for loads, so probably best to
        // write a completely separate load-optimiser for each one.
        stack_info_t *top = peek_stack(emit, 0);
        if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
            // index is an immediate
            mp_int_t index_value = top->data.u_imm;
            emit_pre_pop_discard(emit); // discard index
            int reg_base = REG_ARG_1;
            int reg_index = REG_ARG_2;
            emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_index);
            switch (vtype_base) {
                case VTYPE_PTR8: {
                    // pointer to 8-bit memory
                    // TODO optimise to use thumb ldrb r1, [r2, r3]
                    if (index_value != 0) {
                        // index is non-zero
                        #if N_THUMB
                        // Thumb can encode small positive offsets directly
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
                            break;
                        }
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
                        reg_base = reg_index;
                    }
                    ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
                    break;
                }
                case VTYPE_PTR16: {
                    // pointer to 16-bit memory
                    if (index_value != 0) {
                        // index is a non-zero immediate
                        #if N_THUMB
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
                            break;
                        }
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
                        reg_base = reg_index;
                    }
                    ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
                    break;
                }
                case VTYPE_PTR32: {
                    // pointer to 32-bit memory
                    if (index_value != 0) {
                        // index is a non-zero immediate
                        #if N_THUMB
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
                            break;
                        }
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
                        reg_base = reg_index;
                    }
                    ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
                    break;
                }
                default:
                    EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                        "can't load from '%q'", vtype_to_qstr(vtype_base));
            }
        } else {
            // index is not an immediate
            vtype_kind_t vtype_index;
            int reg_index = REG_ARG_2;
            emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, REG_ARG_1);
            emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
            if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    "can't load with '%q' index", vtype_to_qstr(vtype_index));
            }
            switch (vtype_base) {
                case VTYPE_PTR8: {
                    // pointer to 8-bit memory
                    // TODO optimise to use thumb ldrb r1, [r2, r3]
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
                    break;
                }
                case VTYPE_PTR16: {
                    // pointer to 16-bit memory
                    // index added twice = 2*index (element size scaling)
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
                    break;
                }
                case VTYPE_PTR32: {
                    // pointer to word-size memory
                    // index added four times = 4*index (element size scaling)
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
                    break;
                }
                default:
                    EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                        "can't load from '%q'", vtype_to_qstr(vtype_base));
            }
        }
        emit_post_push_reg(emit, VTYPE_INT, REG_RET);
    }
}
// Emit code to pop the top of stack into a fast local, either a dedicated
// register or a slot in the state area.  In viper mode the first store
// fixes the local's type; subsequent stores must match it.
STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
    vtype_kind_t vtype;
    if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
        emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
        emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
    }
    emit_post(emit);

    // check types
    if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
        // first time this local is assigned, so give it a type of the object stored in it
        emit->local_vtype[local_num] = vtype;
    } else if (emit->local_vtype[local_num] != vtype) {
        // type of local is not the same as object stored in it
        EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
            "local '%q' has type '%q' but source is '%q'",
            qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
    }
}
// Emit code to pop the top of stack into a closed-over variable: load the
// cell object then store the value into the cell at word offset 1.
STATIC void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
    DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
    need_reg_single(emit, REG_TEMP0, 0);
    need_reg_single(emit, REG_TEMP1, 0);
    emit_native_load_fast(emit, qst, local_num); // pushes the cell object
    vtype_kind_t vtype;
    int reg_base = REG_TEMP0;
    emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
    int reg_src = REG_TEMP1;
    emit_pre_pop_reg_flexible(emit, &vtype, &reg_src, reg_base, reg_base);
    ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
    emit_post(emit);
}
  1455. STATIC void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
  1456. if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
  1457. emit_native_store_fast(emit, qst, local_num);
  1458. } else {
  1459. emit_native_store_deref(emit, qst, local_num);
  1460. }
  1461. }
// Emit code to pop the top of stack and store it into a name/global,
// boxing native viper values into objects first when necessary.
STATIC void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
    // The runtime function table is indexed by adding the idop kind
    MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
    MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
    if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
        // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
        assert(vtype == VTYPE_PYOBJ);
    } else {
        vtype_kind_t vtype = peek_vtype(emit, 0);
        if (vtype == VTYPE_PYOBJ) {
            emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
        } else {
            // native value: convert to an object before storing
            emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
            ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
        }
    }
    emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
    emit_post(emit);
}
// Emit code for STORE_ATTR: pops base and value and calls mp_store_attr.
STATIC void emit_native_store_attr(emit_t *emit, qstr qst) {
    vtype_kind_t vtype_base, vtype_val;
    emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
    assert(vtype_base == VTYPE_PYOBJ);
    assert(vtype_val == VTYPE_PYOBJ);
    emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
    emit_post(emit);
}
// Emit code for base[index] = value.  For Python objects this calls
// mp_obj_subscr.  For viper pointer types (ptr8/ptr16/ptr32) it emits a
// direct scaled memory store, with special cases for immediate indices,
// Thumb's reg+imm5 store encodings, and ARM's reg+reg store encodings.
STATIC void emit_native_store_subscr(emit_t *emit) {
    DEBUG_printf("store_subscr\n");
    // need to compile: base[index] = value
    // pop: index, base, value
    // optimise case where index is an immediate
    vtype_kind_t vtype_base = peek_vtype(emit, 1);
    if (vtype_base == VTYPE_PYOBJ) {
        // standard Python subscr
        vtype_kind_t vtype_index = peek_vtype(emit, 0);
        vtype_kind_t vtype_value = peek_vtype(emit, 2);
        if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
            // need to implicitly convert non-objects to objects
            // TODO do this properly
            emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
            adjust_stack(emit, 3);
        }
        emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
        emit_call(emit, MP_F_OBJ_SUBSCR);
    } else {
        // viper store
        // TODO The different machine architectures have very different
        // capabilities and requirements for stores, so probably best to
        // write a completely separate store-optimiser for each one.
        stack_info_t *top = peek_stack(emit, 0);
        if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
            // index is an immediate
            mp_int_t index_value = top->data.u_imm;
            emit_pre_pop_discard(emit); // discard index
            vtype_kind_t vtype_value;
            int reg_base = REG_ARG_1;
            int reg_index = REG_ARG_2;
            int reg_value = REG_ARG_3;
            emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_value);
            #if N_X86
            // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
            emit_pre_pop_reg(emit, &vtype_value, reg_value);
            #else
            emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, reg_base, reg_index);
            #endif
            if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    "can't store '%q'", vtype_to_qstr(vtype_value));
            }
            switch (vtype_base) {
                case VTYPE_PTR8: {
                    // pointer to 8-bit memory
                    // TODO optimise to use thumb strb r1, [r2, r3]
                    if (index_value != 0) {
                        // index is non-zero
                        #if N_THUMB
                        // Thumb can encode small positive offsets directly
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
                            break;
                        }
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                        #if N_ARM
                        // ARM has a reg+reg store encoding; done after this
                        asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
                        return;
                        #endif
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
                        reg_base = reg_index;
                    }
                    ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
                    break;
                }
                case VTYPE_PTR16: {
                    // pointer to 16-bit memory
                    if (index_value != 0) {
                        // index is a non-zero immediate
                        #if N_THUMB
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
                            break;
                        }
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
                        reg_base = reg_index;
                    }
                    ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
                    break;
                }
                case VTYPE_PTR32: {
                    // pointer to 32-bit memory
                    if (index_value != 0) {
                        // index is a non-zero immediate
                        #if N_THUMB
                        if (index_value > 0 && index_value < 32) {
                            asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
                            break;
                        }
                        #endif
                        #if N_ARM
                        // ARM reg+reg store; index is unscaled here
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                        asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
                        return;
                        #endif
                        ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
                        ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
                        reg_base = reg_index;
                    }
                    ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
                    break;
                }
                default:
                    EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                        "can't store to '%q'", vtype_to_qstr(vtype_base));
            }
        } else {
            // index is not an immediate
            vtype_kind_t vtype_index, vtype_value;
            int reg_index = REG_ARG_2;
            int reg_value = REG_ARG_3;
            emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, reg_value);
            emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
            if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    "can't store with '%q' index", vtype_to_qstr(vtype_index));
            }
            #if N_X86
            // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
            emit_pre_pop_reg(emit, &vtype_value, reg_value);
            #else
            emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, REG_ARG_1, reg_index);
            #endif
            if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    "can't store '%q'", vtype_to_qstr(vtype_value));
            }
            switch (vtype_base) {
                case VTYPE_PTR8: {
                    // pointer to 8-bit memory
                    // TODO optimise to use thumb strb r1, [r2, r3]
                    #if N_ARM
                    asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
                    break;
                    #endif
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
                    break;
                }
                case VTYPE_PTR16: {
                    // pointer to 16-bit memory
                    #if N_ARM
                    asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
                    break;
                    #endif
                    // index added twice = 2*index (element size scaling)
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
                    break;
                }
                case VTYPE_PTR32: {
                    // pointer to 32-bit memory
                    #if N_ARM
                    asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
                    break;
                    #endif
                    // index added four times = 4*index (element size scaling)
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
                    break;
                }
                default:
                    EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                        "can't store to '%q'", vtype_to_qstr(vtype_base));
            }
        }
    }
}
// Emit code for DELETE_FAST/DELETE_DEREF.  Fast locals are just set to
// None (see comment below); deref delete is not yet implemented.
STATIC void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
    if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
        // TODO: This is not compliant implementation. We could use MP_OBJ_SENTINEL
        // to mark deleted vars but then every var would need to be checked on
        // each access. Very inefficient, so just set value to None to enable GC.
        emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
        emit_native_store_fast(emit, qst, local_num);
    } else {
        // TODO implement me!
    }
}
// Emit code for DELETE_NAME/DELETE_GLOBAL via the corresponding runtime call.
STATIC void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
    // The runtime function table is indexed by adding the idop kind
    MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
    MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
    emit_native_pre(emit);
    emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
    emit_post(emit);
}
// Emit code for DELETE_ATTR: mp_store_attr with a null value deletes.
STATIC void emit_native_delete_attr(emit_t *emit, qstr qst) {
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
    emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
    emit_post(emit);
}
// Emit code for DELETE_SUBSCR: mp_obj_subscr with MP_OBJ_NULL as the value
// selects its "delete" mode.
STATIC void emit_native_delete_subscr(emit_t *emit) {
    vtype_kind_t vtype_index, vtype_base;
    emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
    assert(vtype_index == VTYPE_PYOBJ);
    assert(vtype_base == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
}
  1697. STATIC void emit_native_subscr(emit_t *emit, int kind) {
  1698. if (kind == MP_EMIT_SUBSCR_LOAD) {
  1699. emit_native_load_subscr(emit);
  1700. } else if (kind == MP_EMIT_SUBSCR_STORE) {
  1701. emit_native_store_subscr(emit);
  1702. } else {
  1703. emit_native_delete_subscr(emit);
  1704. }
  1705. }
  1706. STATIC void emit_native_attr(emit_t *emit, qstr qst, int kind) {
  1707. if (kind == MP_EMIT_ATTR_LOAD) {
  1708. emit_native_load_attr(emit, qst);
  1709. } else if (kind == MP_EMIT_ATTR_STORE) {
  1710. emit_native_store_attr(emit, qst);
  1711. } else {
  1712. emit_native_delete_attr(emit, qst);
  1713. }
  1714. }
// Emit code for DUP_TOP: pop the top of stack into a register and push it twice.
STATIC void emit_native_dup_top(emit_t *emit) {
    DEBUG_printf("dup_top\n");
    vtype_kind_t vtype;
    int reg = REG_TEMP0;
    emit_pre_pop_reg_flexible(emit, &vtype, &reg, -1, -1);
    emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
}
// Emit code for DUP_TOP_TWO: duplicate the top two stack entries, preserving order.
STATIC void emit_native_dup_top_two(emit_t *emit) {
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
}
// Emit code for POP_TOP: discard the top of the Python stack.
STATIC void emit_native_pop_top(emit_t *emit) {
    DEBUG_printf("pop_top\n");
    emit_pre_pop_discard(emit);
    emit_post(emit);
}
// Emit code for ROT_TWO: swap the top two stack entries.
STATIC void emit_native_rot_two(emit_t *emit) {
    DEBUG_printf("rot_two\n");
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
}
// Emit code for ROT_THREE: rotate the top three stack entries
// (top moves to third place, the other two shift up).
STATIC void emit_native_rot_three(emit_t *emit) {
    DEBUG_printf("rot_three\n");
    vtype_kind_t vtype0, vtype1, vtype2;
    emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
    emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
}
// Emit an unconditional jump to the given label.
STATIC void emit_native_jump(emit_t *emit, mp_uint_t label) {
    DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
    emit_native_pre(emit);
    // need to commit stack because we are jumping elsewhere
    need_stack_settled(emit);
    ASM_JUMP(emit->as, label);
    emit_post(emit);
}
// Common implementation for conditional jumps: pop (or peek, if !pop) the
// top of stack, convert it to a truth value, and jump to label when it
// matches cond.  For Python objects mp_obj_is_true is called; bool/int/uint
// viper values are tested directly.
STATIC void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
    vtype_kind_t vtype = peek_vtype(emit, 0);
    if (vtype == VTYPE_PYOBJ) {
        emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
        if (!pop) {
            // keep the value on the stack for the not-taken path
            adjust_stack(emit, 1);
        }
        emit_call(emit, MP_F_OBJ_IS_TRUE);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_RET);
        if (!pop) {
            adjust_stack(emit, 1);
        }
        if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
            EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                "can't implicitly convert '%q' to 'bool'", vtype_to_qstr(vtype));
        }
    }
    // For non-pop need to save the vtype so that emit_native_adjust_stack_size
    // can use it. This is a bit of a hack.
    if (!pop) {
        emit->saved_stack_vtype = vtype;
    }
    // need to commit stack because we may jump elsewhere
    need_stack_settled(emit);
    // Emit the jump
    if (cond) {
        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
    } else {
        ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
    }
    if (!pop) {
        // the not-taken path falls through with the value popped
        adjust_stack(emit, -1);
    }
    emit_post(emit);
}
// Emit POP_JUMP_IF_TRUE/FALSE: pop the condition and jump when it equals cond.
STATIC void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
    DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
    emit_native_jump_helper(emit, cond, label, true);
}
// Emit JUMP_IF_x_OR_POP: jump (keeping the value) when the condition
// matches cond, otherwise pop it and fall through.
STATIC void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
    DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
    emit_native_jump_helper(emit, cond, label, false);
}
// Emit a jump (break/continue) that may unwind through nested exception
// handlers.  Active finally handlers between the jump and its target are
// chained via their unwind_label so each finally runs before the jump
// completes; otherwise the handler PC is restored directly and a plain
// jump is emitted.
STATIC void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
    if (except_depth > 0) {
        exc_stack_entry_t *first_finally = NULL;
        exc_stack_entry_t *prev_finally = NULL;
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        // Walk inner-to-outer over the handlers being unwound
        for (; except_depth > 0; --except_depth, --e) {
            if (e->is_finally && e->is_active) {
                // Found an active finally handler
                if (first_finally == NULL) {
                    first_finally = e;
                }
                if (prev_finally != NULL) {
                    // Mark prev finally as needed to unwind a jump
                    prev_finally->unwind_label = e->label;
                }
                prev_finally = e;
            }
        }
        if (prev_finally == NULL) {
            // No finally, handle the jump ourselves
            // First, restore the exception handler address for the jump
            if (e < emit->exc_stack) {
                // Unwound past the outermost handler: clear the handler PC
                ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
            } else {
                ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
            }
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
        } else {
            // Last finally should do our jump for us
            // Mark finally as needing to decide the type of jump
            prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
            ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
            // Cancel any active exception (see also emit_native_pop_except_jump)
            emit_native_mov_reg_const(emit, REG_RET, MP_F_CONST_NONE_OBJ);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
            // Jump to the innermost active finally
            label = first_finally->label;
        }
    }
    emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
}
// Emit code to enter a `with` block: look up __exit__ and __enter__ on the
// context manager, call __enter__, push this with's exception handler
// (labelled `label`), and leave the stack as
// (..., __exit__, self, as_value, as_value).
STATIC void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
    // the context manager is on the top of the stack
    // stack: (..., ctx_mgr)

    // get __exit__ method
    vtype_kind_t vtype;
    emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
    assert(vtype == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
    emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
    // stack: (..., ctx_mgr, __exit__, self)

    // Rotate so (__exit__, self) sit below the ctx_mgr slot, which is dropped
    emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
    emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
    emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
    emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
    emit_post_push_reg(emit, vtype, REG_ARG_3); // self
    // stack: (..., __exit__, self)
    // REG_ARG_1=ctx_mgr

    // get __enter__ method
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
    emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
    // stack: (..., __exit__, self, __enter__, self)

    // call __enter__ method
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
    emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
    // stack: (..., __exit__, self, as_value)

    // need to commit stack because we may jump elsewhere
    need_stack_settled(emit);
    // Register this with-block's handler; true marks it as a finally-style entry
    emit_native_push_exc_stack(emit, label, true);

    emit_native_dup_top(emit);
    // stack: (..., __exit__, self, as_value, as_value)
}
  1870. STATIC void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
  1871. if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
  1872. emit_native_setup_with(emit, label);
  1873. } else {
  1874. // Set up except and finally
  1875. emit_native_pre(emit);
  1876. need_stack_settled(emit);
  1877. emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
  1878. emit_post(emit);
  1879. }
  1880. }
// Emit the cleanup code for a `with` block.  Handles three ways of leaving
// the block: normal fall-through, an exception caught by the with's handler
// (entered at `label`), and an unwind jump.  Calls __exit__ in every case,
// and swallows the exception when __exit__ returns a true value.
STATIC void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
    // Note: 3 labels are reserved for this function, starting at *emit->label_slot

    // stack: (..., __exit__, self, as_value)
    emit_native_pre(emit);
    emit_native_leave_exc_stack(emit, false);
    adjust_stack(emit, -1);
    // stack: (..., __exit__, self)

    // Label for case where __exit__ is called from an unwind jump
    emit_native_label_assign(emit, *emit->label_slot + 2);

    // call __exit__
    // Normal exit path: __exit__(None, None, None)
    emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
    emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
    emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
    emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);

    // Replace exc with None and finish
    emit_native_jump(emit, *emit->label_slot);

    // nlr_catch
    // Don't use emit_native_label_assign because this isn't a real finally label
    mp_asm_base_label_assign(&emit->as->base, label);

    // Leave with's exception handler
    emit_native_leave_exc_stack(emit, true);

    // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
    emit_native_adjust_stack_size(emit, 2);
    // stack: (..., __exit__, self)

    ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc

    // Check if exc is None and jump to non-exc handler if it is
    emit_native_mov_reg_const(emit, REG_ARG_2, MP_F_CONST_NONE_OBJ);
    ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_2, *emit->label_slot + 2);

    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
    emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
    // Stack: (..., __exit__, self, type(exc), exc, traceback)

    // call __exit__ method
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
    emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
    // Stack: (...)

    // If REG_RET is true then we need to replace exception with None (swallow exception)
    if (REG_ARG_1 != REG_RET) {
        ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
    }
    emit_call(emit, MP_F_OBJ_IS_TRUE);
    ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);

    // Replace exception with None
    emit_native_label_assign(emit, *emit->label_slot);
    emit_native_mov_reg_const(emit, REG_TEMP0, MP_F_CONST_NONE_OBJ);
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);

    // end of with cleanup nlr_catch block
    emit_native_label_assign(emit, *emit->label_slot + 1);

    // Exception is in nlr_buf.ret_val slot
}
// Emit the code at the end of a finally block: re-raise any pending
// exception, then perform a deferred unwind jump if one was requested while
// this finally was active.
STATIC void emit_native_end_finally(emit_t *emit) {
    // logic:
    //   exc = pop_stack
    //   if exc == None: pass
    //   else: raise exc
    // the check if exc is None is done in the MP_F_NATIVE_RAISE stub
    emit_native_pre(emit);
    ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
    emit_call(emit, MP_F_NATIVE_RAISE);

    // Get state for this finally and see if we need to unwind
    exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
    if (e->unwind_label != UNWIND_LABEL_UNUSED) {
        // If the unwind slot is zero there is nothing to do; skip over the jump
        ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
        ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
        if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
            // This finally is last in the chain: jump to the stored target address
            ASM_JUMP_REG(emit->as, REG_RET);
        } else {
            // Continue unwinding through the next enclosing finally
            emit_native_jump(emit, e->unwind_label);
        }
        emit_native_label_assign(emit, *emit->label_slot);
    }

    emit_post(emit);
}
  1956. STATIC void emit_native_get_iter(emit_t *emit, bool use_stack) {
  1957. // perhaps the difficult one, as we want to rewrite for loops using native code
  1958. // in cases where we iterate over a Python object, can we use normal runtime calls?
  1959. vtype_kind_t vtype;
  1960. emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
  1961. assert(vtype == VTYPE_PYOBJ);
  1962. if (use_stack) {
  1963. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
  1964. emit_call(emit, MP_F_NATIVE_GETITER);
  1965. } else {
  1966. // mp_getiter will allocate the iter_buf on the heap
  1967. ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
  1968. emit_call(emit, MP_F_NATIVE_GETITER);
  1969. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1970. }
  1971. }
// Emit the per-iteration head of a for loop: call iternext on the iterator
// (whose buffer occupies MP_OBJ_ITER_BUF_NSLOTS stack slots), jump to `label`
// when iteration is finished, otherwise push the next value.
STATIC void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
    emit_native_pre(emit);
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
    // Keep the iterator buffer slots live on the stack across the loop body
    adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
    emit_call(emit, MP_F_NATIVE_ITERNEXT);
    #if MICROPY_DEBUG_MP_OBJ_SENTINELS
    ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
    ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
    #else
    // Rely on the stop-iteration sentinel being zero for a cheaper test
    MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
    ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
    #endif
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
// Emit code for the end of a for loop: release the iterator buffer slots.
STATIC void emit_native_for_iter_end(emit_t *emit) {
    // adjust stack counter (we get here from for_iter ending, which popped the value for us)
    emit_native_pre(emit);
    adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
    emit_post(emit);
}
  1992. STATIC void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
  1993. if (within_exc_handler) {
  1994. // Cancel any active exception so subsequent handlers don't see it
  1995. emit_native_mov_reg_const(emit, REG_TEMP0, MP_F_CONST_NONE_OBJ);
  1996. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
  1997. } else {
  1998. emit_native_leave_exc_stack(emit, false);
  1999. }
  2000. emit_native_jump(emit, label);
  2001. }
  2002. STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
  2003. vtype_kind_t vtype;
  2004. emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
  2005. if (vtype == VTYPE_PYOBJ) {
  2006. emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
  2007. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  2008. } else {
  2009. adjust_stack(emit, 1);
  2010. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  2011. "unary op %q not implemented", mp_unary_op_method_name[op]);
  2012. }
  2013. }
// Emit code for a binary operation.  The operands are the top two stack
// values (lhs below rhs).  int/int operations are emitted as inline native
// instructions where possible (with per-architecture special cases);
// pyobj/pyobj operations call the runtime; any other type combination is a
// viper type error.
STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
    DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
    vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
    vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
    if (vtype_lhs == VTYPE_INT && vtype_rhs == VTYPE_INT) {
        // for integers, inplace and normal ops are equivalent, so use just normal ops
        if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
            op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
        }

        #if N_X64 || N_X86
        // special cases for x86 and shifting
        // (x86 variable shifts require the count in the CL register)
        if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
            #if N_X64
            emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
            #else
            emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
            #endif
            if (op == MP_BINARY_OP_LSHIFT) {
                ASM_LSL_REG(emit->as, REG_RET);
            } else {
                // Arithmetic (sign-preserving) right shift
                ASM_ASR_REG(emit->as, REG_RET);
            }
            emit_post_push_reg(emit, VTYPE_INT, REG_RET);
            return;
        }
        #endif

        // special cases for floor-divide and modulo because we dispatch to helper functions
        if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
            emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
            if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
                emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
            } else {
                emit_call(emit, MP_F_SMALL_INT_MODULO);
            }
            emit_post_push_reg(emit, VTYPE_INT, REG_RET);
            return;
        }

        // rhs may land in a register other than REG_ARG_3 on some targets
        int reg_rhs = REG_ARG_3;
        emit_pre_pop_reg_flexible(emit, &vtype_rhs, &reg_rhs, REG_RET, REG_ARG_2);
        emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);

        #if !(N_X64 || N_X86)
        // Non-x86 targets have register-count shift instructions
        if (op == MP_BINARY_OP_LSHIFT) {
            ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_RSHIFT) {
            ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else
        #endif
        if (op == MP_BINARY_OP_OR) {
            ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_XOR) {
            ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_AND) {
            ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_ADD) {
            ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_SUBTRACT) {
            ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (op == MP_BINARY_OP_MULTIPLY) {
            ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
            emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
        } else if (MP_BINARY_OP_LESS <= op && op <= MP_BINARY_OP_NOT_EQUAL) {
            // comparison ops are (in enum order):
            //  MP_BINARY_OP_LESS
            //  MP_BINARY_OP_MORE
            //  MP_BINARY_OP_EQUAL
            //  MP_BINARY_OP_LESS_EQUAL
            //  MP_BINARY_OP_MORE_EQUAL
            //  MP_BINARY_OP_NOT_EQUAL
            // The per-arch tables below are indexed by (op - MP_BINARY_OP_LESS).
            need_reg_single(emit, REG_RET, 0);
            #if N_X64
            asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
            asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
            static byte ops[6] = {
                ASM_X64_CC_JL,
                ASM_X64_CC_JG,
                ASM_X64_CC_JE,
                ASM_X64_CC_JLE,
                ASM_X64_CC_JGE,
                ASM_X64_CC_JNE,
            };
            asm_x64_setcc_r8(emit->as, ops[op - MP_BINARY_OP_LESS], REG_RET);
            #elif N_X86
            asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
            asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
            static byte ops[6] = {
                ASM_X86_CC_JL,
                ASM_X86_CC_JG,
                ASM_X86_CC_JE,
                ASM_X86_CC_JLE,
                ASM_X86_CC_JGE,
                ASM_X86_CC_JNE,
            };
            asm_x86_setcc_r8(emit->as, ops[op - MP_BINARY_OP_LESS], REG_RET);
            #elif N_THUMB
            asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
            // Each comparison is an IT/ELSE pair: the `ret` table gives the
            // value moved into REG_RET when the condition holds; the second
            // mov (in the else slot) loads the inverted value.
            static uint16_t ops[6] = {
                ASM_THUMB_OP_ITE_GE,
                ASM_THUMB_OP_ITE_GT,
                ASM_THUMB_OP_ITE_EQ,
                ASM_THUMB_OP_ITE_GT,
                ASM_THUMB_OP_ITE_GE,
                ASM_THUMB_OP_ITE_EQ,
            };
            static byte ret[6] = { 0, 1, 1, 0, 1, 0, };
            asm_thumb_op16(emit->as, ops[op - MP_BINARY_OP_LESS]);
            asm_thumb_mov_rlo_i8(emit->as, REG_RET, ret[op - MP_BINARY_OP_LESS]);
            asm_thumb_mov_rlo_i8(emit->as, REG_RET, ret[op - MP_BINARY_OP_LESS] ^ 1);
            #elif N_ARM
            asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
            static uint ccs[6] = {
                ASM_ARM_CC_LT,
                ASM_ARM_CC_GT,
                ASM_ARM_CC_EQ,
                ASM_ARM_CC_LE,
                ASM_ARM_CC_GE,
                ASM_ARM_CC_NE,
            };
            asm_arm_setcc_reg(emit->as, REG_RET, ccs[op - MP_BINARY_OP_LESS]);
            #elif N_XTENSA || N_XTENSAWIN
            // High bit 0x80 marks conditions implemented by swapping operands
            static uint8_t ccs[6] = {
                ASM_XTENSA_CC_LT,
                0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
                ASM_XTENSA_CC_EQ,
                0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
                ASM_XTENSA_CC_GE,
                ASM_XTENSA_CC_NE,
            };
            uint8_t cc = ccs[op - MP_BINARY_OP_LESS];
            if ((cc & 0x80) == 0) {
                asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
            } else {
                asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
            }
            #else
            #error not implemented
            #endif
            emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
        } else {
            // TODO other ops not yet implemented
            adjust_stack(emit, 1);
            EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                "binary op %q not implemented", mp_binary_op_method_name[op]);
        }
    } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
        emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
        bool invert = false;
        // `not in` and `is not` are emitted as the positive op plus a NOT
        if (op == MP_BINARY_OP_NOT_IN) {
            invert = true;
            op = MP_BINARY_OP_IN;
        } else if (op == MP_BINARY_OP_IS_NOT) {
            invert = true;
            op = MP_BINARY_OP_IS;
        }
        emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
        if (invert) {
            ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
            emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
        }
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    } else {
        adjust_stack(emit, -1);
        EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
            "can't do binary op between '%q' and '%q'",
            vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
    }
}
  2187. #if MICROPY_PY_BUILTINS_SLICE
  2188. STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
  2189. #endif
// Emit code to build a tuple/list/map/set from n_args stack items, or a
// slice (delegated to emit_native_build_slice when slices are enabled).
STATIC void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
    // for viper: call runtime, with types of args
    //   if wrapped in byte_array, or something, allocates memory and fills it
    // The MP_F_BUILD_* runtime entries must line up with the MP_EMIT_BUILD_*
    // constants so that `kind` can be used as a direct offset below.
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
    #if MICROPY_PY_BUILTINS_SLICE
    if (kind == MP_EMIT_BUILD_SLICE) {
        emit_native_build_slice(emit, n_args);
        return;
    }
    #endif
    emit_native_pre(emit);
    if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
        // These kinds take their items via a pointer into the stack
        // (a map is created empty; its n_args is the initial capacity)
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
    }
    emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
}
// Emit code to store a key/value pair into a map under construction.  The
// map is pushed back afterwards so it remains on the stack for further
// stores (MP_F_STORE_MAP returns the map).
STATIC void emit_native_store_map(emit_t *emit) {
    vtype_kind_t vtype_key, vtype_value, vtype_map;
    emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
    assert(vtype_key == VTYPE_PYOBJ);
    assert(vtype_value == VTYPE_PYOBJ);
    assert(vtype_map == VTYPE_PYOBJ);
    emit_call(emit, MP_F_STORE_MAP);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
}
#if MICROPY_PY_BUILTINS_SLICE
// Emit code to build a slice object from 2 (start, stop) or 3
// (start, stop, step) pyobj stack items; a 2-arg slice gets step=None.
STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
    DEBUG_printf("build_slice %d\n", n_args);
    if (n_args == 2) {
        vtype_kind_t vtype_start, vtype_stop;
        emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
        assert(vtype_start == VTYPE_PYOBJ);
        assert(vtype_stop == VTYPE_PYOBJ);
        emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
    } else {
        assert(n_args == 3);
        vtype_kind_t vtype_start, vtype_stop, vtype_step;
        emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
        assert(vtype_start == VTYPE_PYOBJ);
        assert(vtype_stop == VTYPE_PYOBJ);
        assert(vtype_step == VTYPE_PYOBJ);
    }
    emit_call(emit, MP_F_NEW_SLICE);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
#endif
// Emit code to store one produced item into the collection being built by a
// list/set/dict comprehension.  `collection_index` locates the collection on
// the stack (it stays there for subsequent iterations).
STATIC void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
    mp_fun_kind_t f;
    if (kind == SCOPE_LIST_COMP) {
        vtype_kind_t vtype_item;
        emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
        assert(vtype_item == VTYPE_PYOBJ);
        f = MP_F_LIST_APPEND;
    #if MICROPY_PY_BUILTINS_SET
    } else if (kind == SCOPE_SET_COMP) {
        vtype_kind_t vtype_item;
        emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
        assert(vtype_item == VTYPE_PYOBJ);
        f = MP_F_STORE_SET;
    #endif
    } else {
        // SCOPE_DICT_COMP
        vtype_kind_t vtype_key, vtype_value;
        emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
        assert(vtype_key == VTYPE_PYOBJ);
        assert(vtype_value == VTYPE_PYOBJ);
        f = MP_F_STORE_MAP;
    }
    vtype_kind_t vtype_collection;
    emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
    assert(vtype_collection == VTYPE_PYOBJ);
    emit_call(emit, f);
    emit_post(emit);
}
// Emit code to unpack a sequence of exactly n_args items: pop the sequence,
// reserve n_args stack slots and let the runtime fill them.
STATIC void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
    DEBUG_printf("unpack_sequence %d\n", n_args);
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
}
// Emit code for starred unpacking (a, *b, c = seq): reserve
// n_left + n_right + 1 slots (the extra one holds the list for the starred
// target) and let the runtime fill them.
STATIC void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
    DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left and n_right packed into one word
}
  2284. STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
  2285. // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
  2286. emit_native_pre(emit);
  2287. if (n_pos_defaults == 0 && n_kw_defaults == 0) {
  2288. need_reg_all(emit);
  2289. ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
  2290. ASM_MOV_REG_IMM(emit->as, REG_ARG_3, (mp_uint_t)MP_OBJ_NULL);
  2291. } else {
  2292. vtype_kind_t vtype_def_tuple, vtype_def_dict;
  2293. emit_pre_pop_reg_reg(emit, &vtype_def_dict, REG_ARG_3, &vtype_def_tuple, REG_ARG_2);
  2294. assert(vtype_def_tuple == VTYPE_PYOBJ);
  2295. assert(vtype_def_dict == VTYPE_PYOBJ);
  2296. need_reg_all(emit);
  2297. }
  2298. emit_load_reg_with_raw_code(emit, REG_ARG_1, scope->raw_code);
  2299. ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);
  2300. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  2301. }
  2302. STATIC void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
  2303. emit_native_pre(emit);
  2304. if (n_pos_defaults == 0 && n_kw_defaults == 0) {
  2305. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
  2306. ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
  2307. } else {
  2308. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over + 2);
  2309. ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0x100 | n_closed_over);
  2310. }
  2311. emit_load_reg_with_raw_code(emit, REG_ARG_1, scope->raw_code);
  2312. ASM_CALL_IND(emit->as, MP_F_MAKE_CLOSURE_FROM_RAW_CODE);
  2313. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  2314. }
// Emit code to call a function with the given positional/keyword args (and
// optional *args/**kwargs via star_flags).  In viper mode a "call" of a
// builtin cast (e.g. int(x), ptr8(x)) is handled specially: it converts or
// retags the top-of-stack value instead of emitting a real call.
STATIC void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
    DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);

    // TODO: in viper mode, call special runtime routine with type info for args,
    // and wanted type info for return, to remove need for boxing/unboxing

    emit_native_pre(emit);
    vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
    if (vtype_fun == VTYPE_BUILTIN_CAST) {
        // casting operator
        assert(n_positional == 1 && n_keyword == 0);
        assert(!star_flags);
        DEBUG_printf("  cast to %d\n", vtype_fun);
        // The target native type is stored as an immediate in the stack entry
        vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
        switch (peek_vtype(emit, 0)) {
            case VTYPE_PYOBJ: {
                // Converting a pyobj needs a runtime call
                vtype_kind_t vtype;
                emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
                emit_pre_pop_discard(emit);
                emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
                emit_post_push_reg(emit, vtype_cast, REG_RET);
                break;
            }
            case VTYPE_BOOL:
            case VTYPE_INT:
            case VTYPE_UINT:
            case VTYPE_PTR:
            case VTYPE_PTR8:
            case VTYPE_PTR16:
            case VTYPE_PTR32:
            case VTYPE_PTR_NONE:
                // Native-to-native cast: just retag the value, no code needed
                emit_fold_stack_top(emit, REG_ARG_1);
                emit_post_top_set_vtype(emit, vtype_cast);
                break;
            default:
                // this can happen when casting a cast: int(int)
                mp_raise_NotImplementedError("casting");
        }
    } else {
        assert(vtype_fun == VTYPE_PYOBJ);
        if (star_flags) {
            // Stack also holds the function and the *args/**kwargs objects
            emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
            emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
            emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
        } else {
            if (n_positional != 0 || n_keyword != 0) {
                emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
            }
            emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
            emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
            emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
        }
    }
}
// Emit code to call a method: the stack holds (method, self, args...); the
// whole group is passed to the runtime via a pointer into the stack.
STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
    if (star_flags) {
        // Extra 2 slots for *args/**kwargs objects, plus method and self
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 4); // pointer to args
        emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    } else {
        emit_native_pre(emit);
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
        emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}
// Emit code to return the top-of-stack value from the function.  Generators
// store the value via the code-state sp and return MP_VM_RETURN_NORMAL;
// viper functions convert/check the value against the declared return type;
// in all cases control unwinds (running any finallys) to the exit label.
STATIC void emit_native_return_value(emit_t *emit) {
    DEBUG_printf("return_value\n");

    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
        // Save pointer to current stack position for caller to access return value
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
        emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);

        // Put return type in return value slot
        ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);

        // Do the unwinding jump to get to the return handler
        emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
        emit->last_emit_was_return_value = true;
        return;
    }

    if (emit->do_viper_types) {
        // The declared return type is encoded in the scope flags
        vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
        if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
            // Implicit `return None`: synthesize the right representation
            emit_pre_pop_discard(emit);
            if (return_vtype == VTYPE_PYOBJ) {
                emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
            } else {
                ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
            }
        } else {
            vtype_kind_t vtype;
            emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
            if (vtype != return_vtype) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    "return expected '%q' but got '%q'",
                    vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
            }
        }
        if (return_vtype != VTYPE_PYOBJ) {
            // Box the native value so the caller receives a pyobj
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
            #if REG_RET != REG_PARENT_RET
            ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
            #endif
        }
    } else {
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
        assert(vtype == VTYPE_PYOBJ);
    }
    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        // Save return value for the global exception handler to use
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
    }
    emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
    emit->last_emit_was_return_value = true;
}
// Emit code for `raise exc`.  Only the single-argument form is supported by
// the native emitter; the object must be a pyobj.
STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
    (void)n_args;
    assert(n_args == 1);
    vtype_kind_t vtype_exc;
    emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
    if (vtype_exc != VTYPE_PYOBJ) {
        EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "must raise an object");
    }
    // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
    emit_call(emit, MP_F_NATIVE_RAISE);
}
// Emit code for `yield` / `yield from`.  Suspends the generator by saving
// the stack pointer and re-entry PC in the code state, jumping to the exit
// handler, and on re-entry restoring any active exception handler and
// dealing with a value or exception injected by the caller.
STATIC void emit_native_yield(emit_t *emit, int kind) {
    // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot

    if (emit->do_viper_types) {
        mp_raise_NotImplementedError("native yield");
    }
    emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;

    need_stack_settled(emit);

    if (kind == MP_EMIT_YIELD_FROM) {

        // Top of yield-from loop, conceptually implementing:
        //     for item in generator:
        //         yield item

        // Jump to start of loop
        emit_native_jump(emit, *emit->label_slot + 2);

        // Label for top of loop
        emit_native_label_assign(emit, *emit->label_slot + 1);
    }

    // Save pointer to current stack position for caller to access yielded value
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
    emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);

    // Put return type in return value slot
    ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);

    // Save re-entry PC
    ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
    emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);

    // Jump to exit handler
    ASM_JUMP(emit->as, emit->exit_label);

    // Label re-entry point
    mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);

    // Re-open any active exception handler
    if (emit->exc_stack_size > 0) {
        // Find innermost active exception handler, to restore as current handler
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        for (; e >= emit->exc_stack; --e) {
            if (e->is_active) {
                // Found active handler, get its PC
                ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
                ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
            }
        }
    }

    emit_native_adjust_stack_size(emit, 1); // send_value

    if (kind == MP_EMIT_YIELD_VALUE) {
        // Check LOCAL_IDX_EXC_VAL for any injected value
        // (MP_F_NATIVE_RAISE is a no-op when the value is None)
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
        emit_call(emit, MP_F_NATIVE_RAISE);
    } else {
        // Label loop entry
        emit_native_label_assign(emit, *emit->label_slot + 2);

        // Get the next item from the delegate generator
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
        emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_EXC_VAL(emit)); // throw_value
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
        emit_call(emit, MP_F_NATIVE_YIELD_FROM);

        // If returned non-zero then generator continues
        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);

        // Pop exhausted gen, replace with ret_value
        emit_native_adjust_stack_size(emit, 1); // ret_value
        emit_fold_stack_top(emit, REG_ARG_1);
    }
}
// Emit code at the start of an `except` handler: the protected (try) body
// has finished, so the handler entry is popped from the emitter's exception
// stack and the caught exception value is pushed onto the Python stack.
STATIC void emit_native_start_except_handler(emit_t *emit) {
    // Protected block has finished so leave the current exception handler
    emit_native_leave_exc_stack(emit, true);

    // Get and push nlr_buf.ret_val (the exception object that was raised)
    ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
}
// Emit code at the end of an `except` handler: discard the exception value
// that start_except_handler pushed.
STATIC void emit_native_end_except_handler(emit_t *emit) {
    adjust_stack(emit, -1); // pop the exception (end_finally didn't use it)
}
// Method table through which the compiler drives this native emitter.
// Entries are positional and must stay in the order declared by
// emit_method_table_t (declared elsewhere in the project).
const emit_method_table_t EXPORT_FUN(method_table) = {
    #if MICROPY_DYNAMIC_COMPILER
    // Allocation/teardown entry points, used when the emitter is loaded
    // as a dynamic (plugin) compiler component.
    EXPORT_FUN(new),
    EXPORT_FUN(free),
    #endif

    emit_native_start_pass,
    emit_native_end_pass,
    emit_native_last_emit_was_return_value,
    emit_native_adjust_stack_size,
    emit_native_set_source_line,

    // load/store/delete each come as a {local, global} pair
    {
        emit_native_load_local,
        emit_native_load_global,
    },
    {
        emit_native_store_local,
        emit_native_store_global,
    },
    {
        emit_native_delete_local,
        emit_native_delete_global,
    },

    emit_native_label_assign,
    emit_native_import,
    emit_native_load_const_tok,
    emit_native_load_const_small_int,
    emit_native_load_const_str,
    emit_native_load_const_obj,
    emit_native_load_null,
    emit_native_load_method,
    emit_native_load_build_class,
    emit_native_subscr,
    emit_native_attr,
    emit_native_dup_top,
    emit_native_dup_top_two,
    emit_native_pop_top,
    emit_native_rot_two,
    emit_native_rot_three,
    emit_native_jump,
    emit_native_pop_jump_if,
    emit_native_jump_if_or_pop,
    emit_native_unwind_jump,
    emit_native_setup_block,
    emit_native_with_cleanup,
    emit_native_end_finally,
    emit_native_get_iter,
    emit_native_for_iter,
    emit_native_for_iter_end,
    emit_native_pop_except_jump,
    emit_native_unary_op,
    emit_native_binary_op,
    emit_native_build,
    emit_native_store_map,
    emit_native_store_comp,
    emit_native_unpack_sequence,
    emit_native_unpack_ex,
    emit_native_make_function,
    emit_native_make_closure,
    emit_native_call_function,
    emit_native_call_method,
    emit_native_return_value,
    emit_native_raise_varargs,
    emit_native_yield,
    emit_native_start_except_handler,
    emit_native_end_except_handler,
};
  2580. #endif