emitnative.c 124 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718271927202721272227232724272527262727272827292730273127322733273427352736273727382739274027412742274327442745274627472748274927502751275227532754275527562757275827592760276127622763276427652766276727682769277027712772277327742775277627772778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943
29442945294629472948294929502951295229532954295529562957295829592960296129622963296429652966296729682969297029712972297329742975297629772978297929802981298229832984298529862987298829892990299129922993
  1. /*
  2. * This file is part of the MicroPython project, http://micropython.org/
  3. *
  4. * The MIT License (MIT)
  5. *
  6. * Copyright (c) 2013, 2014 Damien P. George
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a copy
  9. * of this software and associated documentation files (the "Software"), to deal
  10. * in the Software without restriction, including without limitation the rights
  11. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  12. * copies of the Software, and to permit persons to whom the Software is
  13. * furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice shall be included in
  16. * all copies or substantial portions of the Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  20. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  21. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  22. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  23. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  24. * THE SOFTWARE.
  25. */
  26. // Essentially normal Python has 1 type: Python objects
  27. // Viper has more than 1 type, and is just a more complicated (a superset of) Python.
  28. // If you declare everything in Viper as a Python object (ie omit type decls) then
  29. // it should in principle be exactly the same as Python native.
  30. // Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
  31. // In practice we won't have a VM but rather do this in asm which is actually very minimal.
  32. // Because it breaks strict Python equivalence it should be a completely separate
  33. // decorator. It breaks equivalence because overflow on integers wraps around.
  34. // It shouldn't break equivalence if you don't use the new types, but since the
  35. // type decls might be used in normal Python for other reasons, it's probably safest,
  36. // cleanest and clearest to make it a separate decorator.
  37. // Actually, it does break equivalence because integers default to native integers,
  38. // not Python objects.
  39. // for x in l[0:8]: can be compiled into a native loop if l has pointer type
  40. #include <stdio.h>
  41. #include <string.h>
  42. #include <assert.h>
  43. #include "py/emit.h"
  44. #include "py/nativeglue.h"
  45. #include "py/objfun.h"
  46. #include "py/objstr.h"
  47. #if MICROPY_DEBUG_VERBOSE // print debugging info
  48. #define DEBUG_PRINT (1)
  49. #define DEBUG_printf DEBUG_printf
  50. #else // don't print debugging info
  51. #define DEBUG_printf(...) (void)0
  52. #endif
  53. // wrapper around everything in this file
  54. #if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN
  55. // C stack layout for native functions:
  56. // 0: nlr_buf_t [optional]
  57. // return_value [optional word]
  58. // exc_handler_unwind [optional word]
  59. // emit->code_state_start: mp_code_state_native_t
  60. // emit->stack_start: Python object stack | emit->n_state
  61. // locals (reversed, L0 at end) |
  62. //
  63. // C stack layout for native generator functions:
  64. // 0=emit->stack_start: nlr_buf_t
  65. // return_value
  66. // exc_handler_unwind [optional word]
  67. //
  68. // Then REG_GENERATOR_STATE points to:
  69. // 0=emit->code_state_start: mp_code_state_native_t
  70. // emit->stack_start: Python object stack | emit->n_state
  71. // locals (reversed, L0 at end) |
  72. //
  73. // C stack layout for viper functions:
  74. // 0: nlr_buf_t [optional]
  75. // return_value [optional word]
  76. // exc_handler_unwind [optional word]
  77. // emit->code_state_start: fun_obj, old_globals [optional]
  78. // emit->stack_start: Python object stack | emit->n_state
  79. // locals (reversed, L0 at end) |
  80. // (L0-L2 may be in regs instead)
  81. // Native emitter needs to know the following sizes and offsets of C structs (on the target):
  82. #if MICROPY_DYNAMIC_COMPILER
  83. #define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK enabled
  84. #else
  85. #define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
  86. #endif
  87. #define SIZEOF_CODE_STATE (sizeof(mp_code_state_native_t) / sizeof(uintptr_t))
  88. #define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_native_t, state) / sizeof(uintptr_t))
  89. #define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_native_t, fun_bc) / sizeof(uintptr_t))
  90. #define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_native_t, ip) / sizeof(uintptr_t))
  91. #define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_native_t, sp) / sizeof(uintptr_t))
  92. #define OFFSETOF_CODE_STATE_N_STATE (offsetof(mp_code_state_native_t, n_state) / sizeof(uintptr_t))
  93. #define OFFSETOF_OBJ_FUN_BC_CONTEXT (offsetof(mp_obj_fun_bc_t, context) / sizeof(uintptr_t))
  94. #define OFFSETOF_OBJ_FUN_BC_CHILD_TABLE (offsetof(mp_obj_fun_bc_t, child_table) / sizeof(uintptr_t))
  95. #define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
  96. #define OFFSETOF_MODULE_CONTEXT_QSTR_TABLE (offsetof(mp_module_context_t, constants.qstr_table) / sizeof(uintptr_t))
  97. #define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
  98. #define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))
  99. // If not already defined, set parent args to same as child call registers
  100. #ifndef REG_PARENT_RET
  101. #define REG_PARENT_RET REG_RET
  102. #define REG_PARENT_ARG_1 REG_ARG_1
  103. #define REG_PARENT_ARG_2 REG_ARG_2
  104. #define REG_PARENT_ARG_3 REG_ARG_3
  105. #define REG_PARENT_ARG_4 REG_ARG_4
  106. #endif
  107. // Word index of nlr_buf_t.ret_val
  108. #define NLR_BUF_IDX_RET_VAL (1)
  109. // Whether the viper function needs access to fun_obj
  110. #define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
  111. || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))
  112. // Whether the native/viper function needs to be wrapped in an exception handler
  113. #define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
  114. || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))
  115. // Whether a slot is needed to store LOCAL_IDX_EXC_HANDLER_UNWIND
  116. #define NEED_EXC_HANDLER_UNWIND(emit) ((emit)->scope->exc_stack_size > 0)
  117. // Whether registers can be used to store locals (only true if there are no
  118. // exception handlers, because otherwise an nlr_jump will restore registers to
  119. // their state at the start of the function and updates to locals will be lost)
  120. #define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))
  121. // Indices within the local C stack for various variables
  122. #define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
  123. #define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
  124. #define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF + 1) // this needs a dedicated variable outside nlr_buf_t
  125. #define LOCAL_IDX_RET_VAL(emit) (SIZEOF_NLR_BUF) // needed when NEED_GLOBAL_EXC_HANDLER is true
  126. #define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
  127. #define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
  128. #define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
  129. #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
  130. #if MICROPY_PERSISTENT_CODE_SAVE
  131. // When building with the ability to save native code to .mpy files:
  132. // - Qstrs are indirect via qstr_table, and REG_LOCAL_3 always points to qstr_table.
  133. // - In a generator no registers are used to store locals, and REG_LOCAL_2 points to the generator state.
  134. // - At most 2 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
  135. #define REG_GENERATOR_STATE (REG_LOCAL_2)
  136. #define REG_QSTR_TABLE (REG_LOCAL_3)
  137. #define MAX_REGS_FOR_LOCAL_VARS (2)
  138. STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2};
  139. #else
  140. // When building without the ability to save native code to .mpy files:
  141. // - Qstrs values are written directly into the machine code.
  142. // - In a generator no registers are used to store locals, and REG_LOCAL_3 points to the generator state.
  143. // - At most 3 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
  144. #define REG_GENERATOR_STATE (REG_LOCAL_3)
  145. #define MAX_REGS_FOR_LOCAL_VARS (3)
  146. STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
  147. #endif
  148. #define REG_LOCAL_LAST (reg_local_table[MAX_REGS_FOR_LOCAL_VARS - 1])
  149. #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
  150. *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
  151. } while (0)
  152. typedef enum {
  153. STACK_VALUE,
  154. STACK_REG,
  155. STACK_IMM,
  156. } stack_info_kind_t;
  157. // these enums must be distinct and the bottom 4 bits
  158. // must correspond to the correct MP_NATIVE_TYPE_xxx value
  159. typedef enum {
  160. VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
  161. VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
  162. VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
  163. VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
  164. VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
  165. VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
  166. VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
  167. VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,
  168. VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
  169. VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
  170. VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
  171. } vtype_kind_t;
  172. STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
  173. switch (vtype) {
  174. case VTYPE_PYOBJ:
  175. return MP_QSTR_object;
  176. case VTYPE_BOOL:
  177. return MP_QSTR_bool;
  178. case VTYPE_INT:
  179. return MP_QSTR_int;
  180. case VTYPE_UINT:
  181. return MP_QSTR_uint;
  182. case VTYPE_PTR:
  183. return MP_QSTR_ptr;
  184. case VTYPE_PTR8:
  185. return MP_QSTR_ptr8;
  186. case VTYPE_PTR16:
  187. return MP_QSTR_ptr16;
  188. case VTYPE_PTR32:
  189. return MP_QSTR_ptr32;
  190. case VTYPE_PTR_NONE:
  191. default:
  192. return MP_QSTR_None;
  193. }
  194. }
  195. typedef struct _stack_info_t {
  196. vtype_kind_t vtype;
  197. stack_info_kind_t kind;
  198. union {
  199. int u_reg;
  200. mp_int_t u_imm;
  201. } data;
  202. } stack_info_t;
  203. #define UNWIND_LABEL_UNUSED (0x7fff)
  204. #define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)
  205. typedef struct _exc_stack_entry_t {
  206. uint16_t label : 15;
  207. uint16_t is_finally : 1;
  208. uint16_t unwind_label : 15;
  209. uint16_t is_active : 1;
  210. } exc_stack_entry_t;
  211. struct _emit_t {
  212. mp_emit_common_t *emit_common;
  213. mp_obj_t *error_slot;
  214. uint *label_slot;
  215. uint exit_label;
  216. int pass;
  217. bool do_viper_types;
  218. bool prelude_offset_uses_u16_encoding;
  219. mp_uint_t local_vtype_alloc;
  220. vtype_kind_t *local_vtype;
  221. mp_uint_t stack_info_alloc;
  222. stack_info_t *stack_info;
  223. vtype_kind_t saved_stack_vtype;
  224. size_t exc_stack_alloc;
  225. size_t exc_stack_size;
  226. exc_stack_entry_t *exc_stack;
  227. int prelude_offset;
  228. int prelude_ptr_index;
  229. int start_offset;
  230. int n_state;
  231. uint16_t code_state_start;
  232. uint16_t stack_start;
  233. int stack_size;
  234. uint16_t n_info;
  235. uint16_t n_cell;
  236. scope_t *scope;
  237. ASM_T *as;
  238. };
  239. STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj);
  240. STATIC void emit_native_global_exc_entry(emit_t *emit);
  241. STATIC void emit_native_global_exc_exit(emit_t *emit);
  242. STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
  243. emit_t *EXPORT_FUN(new)(mp_emit_common_t * emit_common, mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels) {
  244. emit_t *emit = m_new0(emit_t, 1);
  245. emit->emit_common = emit_common;
  246. emit->error_slot = error_slot;
  247. emit->label_slot = label_slot;
  248. emit->stack_info_alloc = 8;
  249. emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
  250. emit->exc_stack_alloc = 8;
  251. emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
  252. emit->as = m_new0(ASM_T, 1);
  253. mp_asm_base_init(&emit->as->base, max_num_labels);
  254. return emit;
  255. }
  256. void EXPORT_FUN(free)(emit_t * emit) {
  257. mp_asm_base_deinit(&emit->as->base, false);
  258. m_del_obj(ASM_T, emit->as);
  259. m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
  260. m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
  261. m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
  262. m_del_obj(emit_t, emit);
  263. }
  264. STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);
  265. STATIC void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
  266. ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
  267. }
  268. STATIC void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
  269. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  270. ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
  271. } else {
  272. ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
  273. }
  274. }
  275. STATIC void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
  276. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  277. ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
  278. } else {
  279. ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
  280. }
  281. }
  282. STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
  283. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  284. ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
  285. ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
  286. } else {
  287. ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
  288. }
  289. }
  290. STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
  291. #if MICROPY_PERSISTENT_CODE_SAVE
  292. ASM_LOAD16_REG_REG_OFFSET(emit->as, arg_reg, REG_QSTR_TABLE, mp_emit_common_use_qstr(emit->emit_common, qst));
  293. #else
  294. ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
  295. #endif
  296. }
  297. STATIC void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
  298. #if MICROPY_PERSISTENT_CODE_SAVE
  299. emit_load_reg_with_object(emit, reg_dest, MP_OBJ_NEW_QSTR(qst));
  300. #else
  301. ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
  302. #endif
  303. }
  304. #define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
  305. do { \
  306. ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
  307. emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
  308. } while (false)
  309. STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
  310. DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
  311. emit->pass = pass;
  312. emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
  313. emit->stack_size = 0;
  314. emit->scope = scope;
  315. // allocate memory for keeping track of the types of locals
  316. if (emit->local_vtype_alloc < scope->num_locals) {
  317. emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
  318. emit->local_vtype_alloc = scope->num_locals;
  319. }
  320. // set default type for arguments
  321. mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
  322. if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
  323. num_args += 1;
  324. }
  325. if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
  326. num_args += 1;
  327. }
  328. for (mp_uint_t i = 0; i < num_args; i++) {
  329. emit->local_vtype[i] = VTYPE_PYOBJ;
  330. }
  331. // Set viper type for arguments
  332. if (emit->do_viper_types) {
  333. for (int i = 0; i < emit->scope->id_info_len; ++i) {
  334. id_info_t *id = &emit->scope->id_info[i];
  335. if (id->flags & ID_FLAG_IS_PARAM) {
  336. assert(id->local_num < emit->local_vtype_alloc);
  337. emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
  338. }
  339. }
  340. }
  341. // local variables begin unbound, and have unknown type
  342. for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
  343. emit->local_vtype[i] = emit->do_viper_types ? VTYPE_UNBOUND : VTYPE_PYOBJ;
  344. }
  345. // values on stack begin unbound
  346. for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
  347. emit->stack_info[i].kind = STACK_VALUE;
  348. emit->stack_info[i].vtype = VTYPE_UNBOUND;
  349. }
  350. mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
  351. // generate code for entry to function
  352. // Work out start of code state (mp_code_state_native_t or reduced version for viper)
  353. emit->code_state_start = 0;
  354. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  355. emit->code_state_start = SIZEOF_NLR_BUF; // for nlr_buf_t
  356. emit->code_state_start += 1; // for return_value
  357. if (NEED_EXC_HANDLER_UNWIND(emit)) {
  358. emit->code_state_start += 1;
  359. }
  360. }
  361. size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
  362. if (emit->do_viper_types) {
  363. // Work out size of state (locals plus stack)
  364. // n_state counts all stack and locals, even those in registers
  365. emit->n_state = scope->num_locals + scope->stack_size;
  366. int num_locals_in_regs = 0;
  367. if (CAN_USE_REGS_FOR_LOCALS(emit)) {
  368. num_locals_in_regs = scope->num_locals;
  369. if (num_locals_in_regs > MAX_REGS_FOR_LOCAL_VARS) {
  370. num_locals_in_regs = MAX_REGS_FOR_LOCAL_VARS;
  371. }
  372. // Need a spot for REG_LOCAL_LAST (see below)
  373. if (scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1) {
  374. --num_locals_in_regs;
  375. }
  376. }
  377. // Work out where the locals and Python stack start within the C stack
  378. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  379. // Reserve 2 words for function object and old globals
  380. emit->stack_start = emit->code_state_start + 2;
  381. } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
  382. // Reserve 1 word for function object, to access const table
  383. emit->stack_start = emit->code_state_start + 1;
  384. } else {
  385. emit->stack_start = emit->code_state_start + 0;
  386. }
  387. // Entry to function
  388. ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);
  389. #if N_X86
  390. asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
  391. #endif
  392. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  393. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
  394. #if MICROPY_PERSISTENT_CODE_SAVE
  395. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
  396. #endif
  397. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
  398. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
  399. // Store function object (passed as first arg) to stack if needed
  400. if (NEED_FUN_OBJ(emit)) {
  401. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
  402. }
  403. // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_LAST
  404. #if N_X86
  405. asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
  406. asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
  407. asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_LAST);
  408. #else
  409. ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
  410. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
  411. ASM_MOV_REG_REG(emit->as, REG_LOCAL_LAST, REG_PARENT_ARG_4);
  412. #endif
  413. // Check number of args matches this function, and call mp_arg_check_num_sig if not
  414. ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
  415. ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
  416. ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
  417. mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
  418. ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
  419. ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
  420. mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);
  421. // Store arguments into locals (reg or stack), converting to native if needed
  422. for (int i = 0; i < emit->scope->num_pos_args; i++) {
  423. int r = REG_ARG_1;
  424. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_LAST, i);
  425. if (emit->local_vtype[i] != VTYPE_PYOBJ) {
  426. emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
  427. r = REG_RET;
  428. }
  429. // REG_LOCAL_LAST points to the args array so be sure not to overwrite it if it's still needed
  430. if (i < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit) && (i != MAX_REGS_FOR_LOCAL_VARS - 1 || emit->scope->num_pos_args == MAX_REGS_FOR_LOCAL_VARS)) {
  431. ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
  432. } else {
  433. emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
  434. }
  435. }
  436. // Get local from the stack back into REG_LOCAL_LAST if this reg couldn't be written to above
  437. if (emit->scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1 && CAN_USE_REGS_FOR_LOCALS(emit)) {
  438. ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_LAST, LOCAL_IDX_LOCAL_VAR(emit, MAX_REGS_FOR_LOCAL_VARS - 1));
  439. }
  440. emit_native_global_exc_entry(emit);
  441. } else {
  442. // work out size of state (locals plus stack)
  443. emit->n_state = scope->num_locals + scope->stack_size;
  444. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  445. mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_ptr_index);
  446. mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
  447. ASM_ENTRY(emit->as, emit->code_state_start);
  448. // Reset the state size for the state pointed to by REG_GENERATOR_STATE
  449. emit->code_state_start = 0;
  450. emit->stack_start = SIZEOF_CODE_STATE;
  451. // Put address of code_state into REG_GENERATOR_STATE
  452. #if N_X86
  453. asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
  454. #else
  455. ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
  456. #endif
  457. // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from
  458. #if N_X86
  459. asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
  460. #endif
  461. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_PARENT_ARG_2);
  462. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  463. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
  464. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
  465. #if MICROPY_PERSISTENT_CODE_SAVE
  466. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
  467. #endif
  468. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
  469. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
  470. } else {
  471. // The locals and stack start after the code_state structure
  472. emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
  473. // Allocate space on C-stack for code_state structure, which includes state
  474. ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
  475. // Prepare incoming arguments for call to mp_setup_code_state
  476. #if N_X86
  477. asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
  478. asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
  479. asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
  480. asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
  481. #endif
  482. // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
  483. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
  484. #if MICROPY_PERSISTENT_CODE_SAVE
  485. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
  486. #endif
  487. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
  488. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
  489. // Set code_state.fun_bc
  490. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
  491. // Set code_state.ip, a pointer to the beginning of the prelude. This pointer is found
  492. // either directly in mp_obj_fun_bc_t.child_table (if there are no children), or in
  493. // mp_obj_fun_bc_t.child_table[num_children] (if num_children > 0).
  494. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
  495. if (emit->prelude_ptr_index != 0) {
  496. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, emit->prelude_ptr_index);
  497. }
  498. emit_native_mov_state_reg(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, REG_PARENT_ARG_1);
  499. // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
  500. emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_N_STATE, emit->n_state, REG_ARG_1);
  501. // Put address of code_state into first arg
  502. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
  503. // Copy next 3 args if needed
  504. #if REG_ARG_2 != REG_PARENT_ARG_2
  505. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
  506. #endif
  507. #if REG_ARG_3 != REG_PARENT_ARG_3
  508. ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
  509. #endif
  510. #if REG_ARG_4 != REG_PARENT_ARG_4
  511. ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
  512. #endif
  513. // Call mp_setup_code_state to prepare code_state structure
  514. #if N_THUMB
  515. asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
  516. #elif N_ARM
  517. asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
  518. #else
  519. ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
  520. #endif
  521. }
  522. emit_native_global_exc_entry(emit);
  523. // cache some locals in registers, but only if no exception handlers
  524. if (CAN_USE_REGS_FOR_LOCALS(emit)) {
  525. for (int i = 0; i < MAX_REGS_FOR_LOCAL_VARS && i < scope->num_locals; ++i) {
  526. ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
  527. }
  528. }
  529. // set the type of closed over variables
  530. for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
  531. id_info_t *id = &scope->id_info[i];
  532. if (id->kind == ID_INFO_KIND_CELL) {
  533. emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
  534. }
  535. }
  536. }
  537. }
  538. static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
  539. mp_asm_base_data(&emit->as->base, 1, val);
  540. }
  541. static inline void emit_native_write_code_info_qstr(emit_t *emit, qstr qst) {
  542. mp_encode_uint(&emit->as->base, mp_asm_base_get_cur_to_write_bytes, mp_emit_common_use_qstr(emit->emit_common, qst));
  543. }
  544. STATIC bool emit_native_end_pass(emit_t *emit) {
  545. emit_native_global_exc_exit(emit);
  546. if (!emit->do_viper_types) {
  547. emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
  548. emit->prelude_ptr_index = emit->emit_common->ct_cur_child;
  549. size_t n_state = emit->n_state;
  550. size_t n_exc_stack = 0; // exc-stack not needed for native code
  551. MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);
  552. size_t n_info = emit->n_info;
  553. size_t n_cell = emit->n_cell;
  554. MP_BC_PRELUDE_SIZE_ENCODE(n_info, n_cell, emit_native_write_code_info_byte, emit);
  555. // bytecode prelude: source info (function and argument qstrs)
  556. size_t info_start = mp_asm_base_get_code_pos(&emit->as->base);
  557. emit_native_write_code_info_qstr(emit, emit->scope->simple_name);
  558. for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
  559. qstr qst = MP_QSTR__star_;
  560. for (int j = 0; j < emit->scope->id_info_len; ++j) {
  561. id_info_t *id = &emit->scope->id_info[j];
  562. if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
  563. qst = id->qst;
  564. break;
  565. }
  566. }
  567. emit_native_write_code_info_qstr(emit, qst);
  568. }
  569. emit->n_info = mp_asm_base_get_code_pos(&emit->as->base) - info_start;
  570. // bytecode prelude: initialise closed over variables
  571. size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
  572. for (int i = 0; i < emit->scope->id_info_len; i++) {
  573. id_info_t *id = &emit->scope->id_info[i];
  574. if (id->kind == ID_INFO_KIND_CELL) {
  575. assert(id->local_num <= 255);
  576. mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
  577. }
  578. }
  579. emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;
  580. }
  581. ASM_END_PASS(emit->as);
  582. // check stack is back to zero size
  583. assert(emit->stack_size == 0);
  584. assert(emit->exc_stack_size == 0);
  585. if (emit->pass == MP_PASS_EMIT) {
  586. void *f = mp_asm_base_get_code(&emit->as->base);
  587. mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
  588. mp_raw_code_t **children = emit->emit_common->children;
  589. if (!emit->do_viper_types) {
  590. #if MICROPY_EMIT_NATIVE_PRELUDE_SEPARATE_FROM_MACHINE_CODE
  591. // Executable code cannot be accessed byte-wise on this architecture, so copy
  592. // the prelude to a separate memory region that is byte-wise readable.
  593. void *buf = emit->as->base.code_base + emit->prelude_offset;
  594. size_t n = emit->as->base.code_offset - emit->prelude_offset;
  595. const uint8_t *prelude_ptr = memcpy(m_new(uint8_t, n), buf, n);
  596. #else
  597. // Point to the prelude directly, at the end of the machine code data.
  598. const uint8_t *prelude_ptr = (const uint8_t *)f + emit->prelude_offset;
  599. #endif
  600. // Store the pointer to the prelude using the child_table.
  601. assert(emit->prelude_ptr_index == emit->emit_common->ct_cur_child);
  602. if (emit->prelude_ptr_index == 0) {
  603. children = (void *)prelude_ptr;
  604. } else {
  605. children = m_renew(mp_raw_code_t *, children, emit->prelude_ptr_index, emit->prelude_ptr_index + 1);
  606. children[emit->prelude_ptr_index] = (void *)prelude_ptr;
  607. }
  608. }
  609. mp_emit_glue_assign_native(emit->scope->raw_code,
  610. emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
  611. f, f_len,
  612. children,
  613. #if MICROPY_PERSISTENT_CODE_SAVE
  614. emit->emit_common->ct_cur_child,
  615. emit->prelude_offset,
  616. #endif
  617. emit->scope->scope_flags, 0, 0);
  618. }
  619. return true;
  620. }
  621. STATIC void ensure_extra_stack(emit_t *emit, size_t delta) {
  622. if (emit->stack_size + delta > emit->stack_info_alloc) {
  623. size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
  624. emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
  625. emit->stack_info_alloc = new_alloc;
  626. }
  627. }
  628. STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
  629. assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
  630. assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
  631. emit->stack_size += stack_size_delta;
  632. if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
  633. emit->scope->stack_size = emit->stack_size;
  634. }
  635. #if DEBUG_PRINT
  636. DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
  637. for (int i = 0; i < emit->stack_size; i++) {
  638. stack_info_t *si = &emit->stack_info[i];
  639. DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
  640. }
  641. DEBUG_printf("\n");
  642. #endif
  643. }
  644. STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
  645. DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
  646. if (delta > 0) {
  647. ensure_extra_stack(emit, delta);
  648. }
  649. // If we are adjusting the stack in a positive direction (pushing) then we
  650. // need to fill in values for the stack kind and vtype of the newly-pushed
  651. // entries. These should be set to "value" (ie not reg or imm) because we
  652. // should only need to adjust the stack due to a jump to this part in the
  653. // code (and hence we have settled the stack before the jump).
  654. for (mp_int_t i = 0; i < delta; i++) {
  655. stack_info_t *si = &emit->stack_info[emit->stack_size + i];
  656. si->kind = STACK_VALUE;
  657. // TODO we don't know the vtype to use here. At the moment this is a
  658. // hack to get the case of multi comparison working.
  659. if (delta == 1) {
  660. si->vtype = emit->saved_stack_vtype;
  661. } else {
  662. si->vtype = VTYPE_PYOBJ;
  663. }
  664. }
  665. adjust_stack(emit, delta);
  666. }
  667. STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
  668. (void)emit;
  669. (void)source_line;
  670. }
  671. // this must be called at start of emit functions
  672. STATIC void emit_native_pre(emit_t *emit) {
  673. (void)emit;
  674. }
  675. // depth==0 is top, depth==1 is before top, etc
  676. STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
  677. return &emit->stack_info[emit->stack_size - 1 - depth];
  678. }
  679. // depth==0 is top, depth==1 is before top, etc
  680. STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
  681. if (emit->do_viper_types) {
  682. return peek_stack(emit, depth)->vtype;
  683. } else {
  684. // Type is always PYOBJ even if the intermediate stored value is not
  685. return VTYPE_PYOBJ;
  686. }
  687. }
  688. // pos=1 is TOS, pos=2 is next, etc
  689. // use pos=0 for no skipping
  690. STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
  691. skip_stack_pos = emit->stack_size - skip_stack_pos;
  692. for (int i = 0; i < emit->stack_size; i++) {
  693. if (i != skip_stack_pos) {
  694. stack_info_t *si = &emit->stack_info[i];
  695. if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
  696. si->kind = STACK_VALUE;
  697. emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
  698. }
  699. }
  700. }
  701. }
  702. // Ensures all unsettled registers that hold Python values are copied to the
  703. // concrete Python stack. All registers are then free to use.
  704. STATIC void need_reg_all(emit_t *emit) {
  705. for (int i = 0; i < emit->stack_size; i++) {
  706. stack_info_t *si = &emit->stack_info[i];
  707. if (si->kind == STACK_REG) {
  708. DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
  709. si->kind = STACK_VALUE;
  710. emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
  711. }
  712. }
  713. }
  714. STATIC vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
  715. if (!convert_to_pyobj && emit->do_viper_types) {
  716. ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
  717. return si->vtype;
  718. } else {
  719. if (si->vtype == VTYPE_PYOBJ) {
  720. ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
  721. } else if (si->vtype == VTYPE_BOOL) {
  722. emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
  723. } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
  724. ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
  725. } else if (si->vtype == VTYPE_PTR_NONE) {
  726. emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
  727. } else {
  728. mp_raise_NotImplementedError(MP_ERROR_TEXT("conversion to object"));
  729. }
  730. return VTYPE_PYOBJ;
  731. }
  732. }
  733. // Copies all unsettled registers and immediates that are Python values into the
  734. // concrete Python stack. This ensures the concrete Python stack holds valid
  735. // values for the current stack_size.
  736. // This function may clobber REG_TEMP1.
  737. STATIC void need_stack_settled(emit_t *emit) {
  738. DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
  739. need_reg_all(emit);
  740. for (int i = 0; i < emit->stack_size; i++) {
  741. stack_info_t *si = &emit->stack_info[i];
  742. if (si->kind == STACK_IMM) {
  743. DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
  744. si->kind = STACK_VALUE;
  745. // using REG_TEMP1 to avoid clobbering REG_TEMP0 (aka REG_RET)
  746. si->vtype = load_reg_stack_imm(emit, REG_TEMP1, si, false);
  747. emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP1);
  748. }
  749. }
  750. }
  751. // pos=1 is TOS, pos=2 is next, etc
  752. STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
  753. need_reg_single(emit, reg_dest, pos);
  754. stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
  755. *vtype = si->vtype;
  756. switch (si->kind) {
  757. case STACK_VALUE:
  758. emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
  759. break;
  760. case STACK_REG:
  761. if (si->data.u_reg != reg_dest) {
  762. ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
  763. }
  764. break;
  765. case STACK_IMM:
  766. *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
  767. break;
  768. }
  769. }
  770. // does an efficient X=pop(); discard(); push(X)
  771. // needs a (non-temp) register in case the popped element was stored in the stack
  772. STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
  773. stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
  774. si[0] = si[1];
  775. if (si->kind == STACK_VALUE) {
  776. // if folded element was on the stack we need to put it in a register
  777. emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
  778. si->kind = STACK_REG;
  779. si->data.u_reg = reg_dest;
  780. }
  781. adjust_stack(emit, -1);
  782. }
  783. // If stacked value is in a register and the register is not r1 or r2, then
  784. // *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
  785. STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
  786. stack_info_t *si = peek_stack(emit, 0);
  787. if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
  788. *vtype = si->vtype;
  789. *reg_dest = si->data.u_reg;
  790. need_reg_single(emit, *reg_dest, 1);
  791. } else {
  792. emit_access_stack(emit, 1, vtype, *reg_dest);
  793. }
  794. adjust_stack(emit, -1);
  795. }
  796. STATIC void emit_pre_pop_discard(emit_t *emit) {
  797. adjust_stack(emit, -1);
  798. }
  799. STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
  800. emit_access_stack(emit, 1, vtype, reg_dest);
  801. adjust_stack(emit, -1);
  802. }
  803. STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
  804. emit_pre_pop_reg(emit, vtypea, rega);
  805. emit_pre_pop_reg(emit, vtypeb, regb);
  806. }
  807. STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
  808. emit_pre_pop_reg(emit, vtypea, rega);
  809. emit_pre_pop_reg(emit, vtypeb, regb);
  810. emit_pre_pop_reg(emit, vtypec, regc);
  811. }
  812. STATIC void emit_post(emit_t *emit) {
  813. (void)emit;
  814. }
  815. STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
  816. stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
  817. si->vtype = new_vtype;
  818. }
  819. STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
  820. ensure_extra_stack(emit, 1);
  821. stack_info_t *si = &emit->stack_info[emit->stack_size];
  822. si->vtype = vtype;
  823. si->kind = STACK_REG;
  824. si->data.u_reg = reg;
  825. adjust_stack(emit, 1);
  826. }
  827. STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
  828. ensure_extra_stack(emit, 1);
  829. stack_info_t *si = &emit->stack_info[emit->stack_size];
  830. si->vtype = vtype;
  831. si->kind = STACK_IMM;
  832. si->data.u_imm = imm;
  833. adjust_stack(emit, 1);
  834. }
  835. STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
  836. emit_post_push_reg(emit, vtypea, rega);
  837. emit_post_push_reg(emit, vtypeb, regb);
  838. }
  839. STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
  840. emit_post_push_reg(emit, vtypea, rega);
  841. emit_post_push_reg(emit, vtypeb, regb);
  842. emit_post_push_reg(emit, vtypec, regc);
  843. }
  844. STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
  845. emit_post_push_reg(emit, vtypea, rega);
  846. emit_post_push_reg(emit, vtypeb, regb);
  847. emit_post_push_reg(emit, vtypec, regc);
  848. emit_post_push_reg(emit, vtyped, regd);
  849. }
  850. STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
  851. need_reg_all(emit);
  852. ASM_CALL_IND(emit->as, fun_kind);
  853. }
  854. STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
  855. need_reg_all(emit);
  856. ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
  857. ASM_CALL_IND(emit->as, fun_kind);
  858. }
  859. STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
  860. need_reg_all(emit);
  861. ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
  862. ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
  863. ASM_CALL_IND(emit->as, fun_kind);
  864. }
  865. STATIC void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
  866. need_reg_all(emit);
  867. emit_native_mov_reg_qstr(emit, arg_reg, qst);
  868. ASM_CALL_IND(emit->as, fun_kind);
  869. }
  870. // vtype of all n_pop objects is VTYPE_PYOBJ
  871. // Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
  872. // If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
  873. // Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
  874. STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
  875. need_reg_all(emit);
  876. // First, store any immediate values to their respective place on the stack.
  877. for (mp_uint_t i = 0; i < n_pop; i++) {
  878. stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
  879. // must push any imm's to stack
  880. // must convert them to VTYPE_PYOBJ for viper code
  881. if (si->kind == STACK_IMM) {
  882. si->kind = STACK_VALUE;
  883. si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
  884. emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
  885. }
  886. // verify that this value is on the stack
  887. assert(si->kind == STACK_VALUE);
  888. }
  889. // Second, convert any non-VTYPE_PYOBJ to that type.
  890. for (mp_uint_t i = 0; i < n_pop; i++) {
  891. stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
  892. if (si->vtype != VTYPE_PYOBJ) {
  893. mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
  894. emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
  895. emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
  896. emit_native_mov_state_reg(emit, local_num, REG_RET);
  897. si->vtype = VTYPE_PYOBJ;
  898. DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
  899. }
  900. }
  901. // Adujust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
  902. adjust_stack(emit, -n_pop);
  903. emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
  904. }
  905. // vtype of all n_push objects is VTYPE_PYOBJ
  906. STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
  907. need_reg_all(emit);
  908. ensure_extra_stack(emit, n_push);
  909. for (mp_uint_t i = 0; i < n_push; i++) {
  910. emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
  911. emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
  912. }
  913. emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
  914. adjust_stack(emit, n_push);
  915. }
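// Push an entry onto the exception-handler stack for a new try/except/finally
// or with block, growing the array as needed, and store the PC-relative
// address of the handler label into the LOCAL_IDX_EXC_HANDLER_PC slot so the
// global exception handler can dispatch to it when an exception is raised.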
  916. STATIC void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
  917. if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
  918. size_t new_alloc = emit->exc_stack_alloc + 4;
  919. emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
  920. emit->exc_stack_alloc = new_alloc;
  921. }
  922. exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
  923. e->label = label;
  924. e->is_finally = is_finally;
  925. e->unwind_label = UNWIND_LABEL_UNUSED;
  926. e->is_active = true;
  927. ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
  928. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
  929. }
  930. STATIC void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
  931. assert(emit->exc_stack_size > 0);
  932. // Get current exception handler and deactivate it
  933. exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
  934. e->is_active = false;
  935. // Find next innermost active exception handler, to restore as current handler
  936. for (--e; e >= emit->exc_stack && !e->is_active; --e) {
  937. }
  938. // Update the PC of the new exception handler
  939. if (e < emit->exc_stack) {
  940. // No active handler, clear handler PC to zero
  941. if (start_of_handler) {
  942. // Optimisation: PC is already cleared by global exc handler
  943. return;
  944. }
  945. ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
  946. } else {
  947. // Found new active handler, get its PC
  948. ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
  949. }
  950. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
  951. }
  952. STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
  953. assert(emit->exc_stack_size > 0);
  954. exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
  955. assert(e->is_active == false);
  956. return e;
  957. }
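// Load a constant object into the given register by chasing
// fun_obj -> module context -> constant object table and reading the entry at
// table_off, which is allocated via mp_emit_common_use_const_obj.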
  958. STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
  959. emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
  960. size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
  961. emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
  962. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
  963. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
  964. ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
  965. }
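// Load a child raw-code pointer (e.g. for a nested function or closure) from
// the function object's child table, using the slot returned by
// mp_emit_common_alloc_const_child.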
  966. STATIC void emit_load_reg_with_child(emit_t *emit, int reg, mp_raw_code_t *rc) {
  967. size_t table_off = mp_emit_common_alloc_const_child(emit->emit_common, rc);
  968. emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
  969. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
  970. ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
  971. }
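// Assign a label at the current code position.  If the label is the entry of
// a finally handler then the value on the top of the stack is first saved
// into the exception slot, and emit_native_leave_exc_stack is called after
// the label is emitted to restore the enclosing handler as the current one.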
  972. STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
  973. DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
  974. bool is_finally = false;
  975. if (emit->exc_stack_size > 0) {
  976. exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
  977. is_finally = e->is_finally && e->label == l;
  978. }
  979. if (is_finally) {
  980. // Label is at start of finally handler: store TOS into exception slot
  981. vtype_kind_t vtype;
  982. emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
  983. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
  984. }
  985. emit_native_pre(emit);
  986. // need to commit stack because we can jump here from elsewhere
  987. need_stack_settled(emit);
  988. mp_asm_base_label_assign(&emit->as->base, l);
  989. emit_post(emit);
  990. if (is_finally) {
  991. // Label is at start of finally handler: pop exception stack
  992. emit_native_leave_exc_stack(emit, false);
  993. }
  994. }
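// Emit the function prologue that installs the global exception handling:
// swap in the function's globals dict (for non-generators), push an nlr
// context (or set up the resume/re-raise path when local exception handlers
// exist), and for generators jump to the saved resume PC.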
  995. STATIC void emit_native_global_exc_entry(emit_t *emit) {
  996. // Note: 4 labels are reserved for this function, starting at *emit->label_slot
  997. emit->exit_label = *emit->label_slot;
  998. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  999. mp_uint_t nlr_label = *emit->label_slot + 1;
  1000. mp_uint_t start_label = *emit->label_slot + 2;
  1001. mp_uint_t global_except_label = *emit->label_slot + 3;
  1002. if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
  1003. // Set new globals
  1004. emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
  1005. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
  1006. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_MODULE_CONTEXT_GLOBALS);
  1007. emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
  1008. // Save old globals (or NULL if globals didn't change)
  1009. emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
  1010. }
  1011. if (emit->scope->exc_stack_size == 0) {
  1012. if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
  1013. // Optimisation: if globals didn't change don't push the nlr context
  1014. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
  1015. }
  1016. // Wrap everything in an nlr context
  1017. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
  1018. emit_call(emit, MP_F_NLR_PUSH);
  1019. #if N_NLR_SETJMP
  1020. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
  1021. emit_call(emit, MP_F_SETJMP);
  1022. #endif
  1023. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
  1024. } else {
  1025. // Clear the unwind state
  1026. ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
  1027. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
  1028. // Put PC of start code block into REG_LOCAL_1
  1029. ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
  1030. // Wrap everything in an nlr context
  1031. emit_native_label_assign(emit, nlr_label);
  1032. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
  1033. emit_call(emit, MP_F_NLR_PUSH);
  1034. #if N_NLR_SETJMP
  1035. ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
  1036. emit_call(emit, MP_F_SETJMP);
  1037. #endif
  1038. ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
  1039. // Clear PC of current code block, and jump there to resume execution
  1040. ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
  1041. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
  1042. ASM_JUMP_REG(emit->as, REG_LOCAL_1);
  1043. // Global exception handler: check for valid exception handler
  1044. emit_native_label_assign(emit, global_except_label);
  1045. ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
  1046. ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
  1047. }
  1048. if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
  1049. // Restore old globals
  1050. emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
  1051. emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
  1052. }
  1053. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  1054. // Store return value in state[0]
  1055. ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
  1056. ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
  1057. // Load return kind
  1058. ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
  1059. ASM_EXIT(emit->as);
  1060. } else {
  1061. // Re-raise exception out to caller
  1062. ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
  1063. emit_call(emit, MP_F_NATIVE_RAISE);
  1064. }
  1065. // Label for start of function
  1066. emit_native_label_assign(emit, start_label);
  1067. if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
  1068. emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
  1069. ASM_JUMP_REG(emit->as, REG_TEMP0);
  1070. emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
  1071. // This is the first entry of the generator
  1072. // Check LOCAL_IDX_EXC_VAL for any injected value
  1073. ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
  1074. emit_call(emit, MP_F_NATIVE_RAISE);
  1075. }
  1076. }
  1077. }
  1078. STATIC void emit_native_global_exc_exit(emit_t *emit) {
  1079. // Label for end of function
  1080. emit_native_label_assign(emit, emit->exit_label);
  1081. if (NEED_GLOBAL_EXC_HANDLER(emit)) {
  1082. // Get old globals
  1083. if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
  1084. emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
  1085. if (emit->scope->exc_stack_size == 0) {
  1086. // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
  1087. ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
  1088. }
  1089. // Restore old globals
  1090. emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
  1091. }
  1092. // Pop the nlr context
  1093. emit_call(emit, MP_F_NLR_POP);
  1094. if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
  1095. if (emit->scope->exc_stack_size == 0) {
  1096. // Destination label for above optimisation
  1097. emit_native_label_assign(emit, emit->exit_label + 1);
  1098. }
  1099. }
  1100. // Load return value
  1101. ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
  1102. }
  1103. ASM_EXIT(emit->as);
  1104. }
  1105. STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
  1106. DEBUG_printf("import_name %s\n", qstr_str(qst));
  1107. // get arguments from stack: arg2 = fromlist, arg3 = level
  1108. // If using viper types these arguments must be converted to proper objects, and
  1109. // to accomplish this viper types are turned off for the emit_pre_pop_reg_reg call.
  1110. bool orig_do_viper_types = emit->do_viper_types;
  1111. emit->do_viper_types = false;
  1112. vtype_kind_t vtype_fromlist;
  1113. vtype_kind_t vtype_level;
  1114. emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
  1115. assert(vtype_fromlist == VTYPE_PYOBJ);
  1116. assert(vtype_level == VTYPE_PYOBJ);
  1117. emit->do_viper_types = orig_do_viper_types;
  1118. emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
  1119. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1120. }
  1121. STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
  1122. DEBUG_printf("import_from %s\n", qstr_str(qst));
  1123. emit_native_pre(emit);
  1124. vtype_kind_t vtype_module;
  1125. emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
  1126. assert(vtype_module == VTYPE_PYOBJ);
  1127. emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
  1128. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1129. }
  1130. STATIC void emit_native_import_star(emit_t *emit) {
  1131. DEBUG_printf("import_star\n");
  1132. vtype_kind_t vtype_module;
  1133. emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
  1134. assert(vtype_module == VTYPE_PYOBJ);
  1135. emit_call(emit, MP_F_IMPORT_ALL);
  1136. emit_post(emit);
  1137. }
  1138. STATIC void emit_native_import(emit_t *emit, qstr qst, int kind) {
  1139. if (kind == MP_EMIT_IMPORT_NAME) {
  1140. emit_native_import_name(emit, qst);
  1141. } else if (kind == MP_EMIT_IMPORT_FROM) {
  1142. emit_native_import_from(emit, qst);
  1143. } else {
  1144. emit_native_import_star(emit);
  1145. }
  1146. }
  1147. STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
  1148. DEBUG_printf("load_const_tok(tok=%u)\n", tok);
  1149. if (tok == MP_TOKEN_ELLIPSIS) {
  1150. emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
  1151. } else {
  1152. emit_native_pre(emit);
  1153. if (tok == MP_TOKEN_KW_NONE) {
  1154. emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
  1155. } else {
  1156. emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
  1157. }
  1158. }
  1159. }
  1160. STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
  1161. DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
  1162. emit_native_pre(emit);
  1163. emit_post_push_imm(emit, VTYPE_INT, arg);
  1164. }
  1165. STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
  1166. emit_native_pre(emit);
  1167. // TODO: Eventually we want to be able to work with raw pointers in viper to
  1168. // do native array access. For now we just load them as any other object.
  1169. /*
  1170. if (emit->do_viper_types) {
  1171. // load a pointer to the asciiz string?
  1172. emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
  1173. } else
  1174. */
  1175. {
  1176. need_reg_single(emit, REG_TEMP0, 0);
  1177. emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
  1178. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
  1179. }
  1180. }
  1181. STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
  1182. emit_native_pre(emit);
  1183. need_reg_single(emit, REG_RET, 0);
  1184. emit_load_reg_with_object(emit, REG_RET, obj);
  1185. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1186. }
  1187. STATIC void emit_native_load_null(emit_t *emit) {
  1188. emit_native_pre(emit);
  1189. emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
  1190. }
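// Load a local variable onto the Python/viper stack.  The first few locals
// may be held in machine registers (reg_local_table); the rest are read from
// the state area via emit_native_mov_reg_state.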
  1191. STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
  1192. DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
  1193. vtype_kind_t vtype = emit->local_vtype[local_num];
  1194. if (vtype == VTYPE_UNBOUND) {
  1195. EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
  1196. }
  1197. emit_native_pre(emit);
  1198. if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
  1199. emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
  1200. } else {
  1201. need_reg_single(emit, REG_TEMP0, 0);
  1202. emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
  1203. emit_post_push_reg(emit, vtype, REG_TEMP0);
  1204. }
  1205. }
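// Load a closed-over variable: load_fast pushes the cell object, then the
// cell's value is read one machine word past the start of the cell
// (offset 1 in ASM_LOAD_REG_REG_OFFSET word units).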
  1206. STATIC void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
  1207. DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
  1208. need_reg_single(emit, REG_RET, 0);
  1209. emit_native_load_fast(emit, qst, local_num);
  1210. vtype_kind_t vtype;
  1211. int reg_base = REG_RET;
  1212. emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
  1213. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
  1214. // closed over vars are always Python objects
  1215. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1216. }
  1217. STATIC void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
  1218. if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
  1219. emit_native_load_fast(emit, qst, local_num);
  1220. } else {
  1221. emit_native_load_deref(emit, qst, local_num);
  1222. }
  1223. }
  1224. STATIC void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
  1225. MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
  1226. MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
  1227. emit_native_pre(emit);
  1228. if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
  1229. DEBUG_printf("load_name(%s)\n", qstr_str(qst));
  1230. } else {
  1231. DEBUG_printf("load_global(%s)\n", qstr_str(qst));
  1232. if (emit->do_viper_types) {
  1233. // check for builtin casting operators
  1234. int native_type = mp_native_type_from_qstr(qst);
  1235. if (native_type >= MP_NATIVE_TYPE_BOOL) {
  1236. emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
  1237. return;
  1238. }
  1239. }
  1240. }
  1241. emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
  1242. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1243. }
  1244. STATIC void emit_native_load_attr(emit_t *emit, qstr qst) {
  1245. // depends on type of subject:
  1246. // - integer, function, pointer to integers: error
  1247. // - pointer to structure: get member, quite easy
  1248. // - Python object: call mp_load_attr, and needs to be typed to convert result
  1249. vtype_kind_t vtype_base;
  1250. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
  1251. assert(vtype_base == VTYPE_PYOBJ);
  1252. emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
  1253. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1254. }
  1255. STATIC void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
  1256. if (is_super) {
  1257. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
  1258. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
  1259. emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
  1260. } else {
  1261. vtype_kind_t vtype_base;
  1262. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
  1263. assert(vtype_base == VTYPE_PYOBJ);
  1264. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
  1265. emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
  1266. }
  1267. }
  1268. STATIC void emit_native_load_build_class(emit_t *emit) {
  1269. emit_native_pre(emit);
  1270. emit_call(emit, MP_F_LOAD_BUILD_CLASS);
  1271. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1272. }
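// For example (illustrative viper source, not taken from a test):
//     p = ptr16(addr)
//     x = p[3]
// compiles to a 16-bit load from p + 2*3; with a small constant index on
// Thumb this can become a single ldrh instruction via the fast path below.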
  1273. STATIC void emit_native_load_subscr(emit_t *emit) {
  1274. DEBUG_printf("load_subscr\n");
  1275. // need to compile: base[index]
  1276. // pop: index, base
  1277. // optimise case where index is an immediate
  1278. vtype_kind_t vtype_base = peek_vtype(emit, 1);
  1279. if (vtype_base == VTYPE_PYOBJ) {
  1280. // standard Python subscr
  1281. // TODO factor this implicit cast code with other uses of it
  1282. vtype_kind_t vtype_index = peek_vtype(emit, 0);
  1283. if (vtype_index == VTYPE_PYOBJ) {
  1284. emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
  1285. } else {
  1286. emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
  1287. emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
  1288. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
  1289. }
  1290. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
  1291. emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
  1292. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1293. } else {
  1294. // viper load
  1295. // TODO The different machine architectures have very different
  1296. // capabilities and requirements for loads, so probably best to
  1297. // write a completely separate load-optimiser for each one.
  1298. stack_info_t *top = peek_stack(emit, 0);
  1299. if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
  1300. // index is an immediate
  1301. mp_int_t index_value = top->data.u_imm;
  1302. emit_pre_pop_discard(emit); // discard index
  1303. int reg_base = REG_ARG_1;
  1304. int reg_index = REG_ARG_2;
  1305. emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_index);
  1306. need_reg_single(emit, REG_RET, 0);
  1307. switch (vtype_base) {
  1308. case VTYPE_PTR8: {
  1309. // pointer to 8-bit memory
  1310. // TODO optimise to use thumb ldrb r1, [r2, r3]
  1311. if (index_value != 0) {
  1312. // index is non-zero
  1313. #if N_THUMB
  1314. if (index_value > 0 && index_value < 32) {
  1315. asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
  1316. break;
  1317. }
  1318. #endif
  1319. need_reg_single(emit, reg_index, 0);
  1320. ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
  1321. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
  1322. reg_base = reg_index;
  1323. }
  1324. ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
  1325. break;
  1326. }
  1327. case VTYPE_PTR16: {
  1328. // pointer to 16-bit memory
  1329. if (index_value != 0) {
  1330. // index is a non-zero immediate
  1331. #if N_THUMB
  1332. if (index_value > 0 && index_value < 32) {
  1333. asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
  1334. break;
  1335. }
  1336. #endif
  1337. need_reg_single(emit, reg_index, 0);
  1338. ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
  1339. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
  1340. reg_base = reg_index;
  1341. }
  1342. ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
  1343. break;
  1344. }
  1345. case VTYPE_PTR32: {
  1346. // pointer to 32-bit memory
  1347. if (index_value != 0) {
  1348. // index is a non-zero immediate
  1349. #if N_THUMB
  1350. if (index_value > 0 && index_value < 32) {
  1351. asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
  1352. break;
  1353. }
  1354. #endif
  1355. need_reg_single(emit, reg_index, 0);
  1356. ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
  1357. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
  1358. reg_base = reg_index;
  1359. }
  1360. ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
  1361. break;
  1362. }
  1363. default:
  1364. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1365. MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
  1366. }
  1367. } else {
  1368. // index is not an immediate
  1369. vtype_kind_t vtype_index;
  1370. int reg_index = REG_ARG_2;
  1371. emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, REG_ARG_1);
  1372. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
  1373. need_reg_single(emit, REG_RET, 0);
  1374. if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
  1375. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1376. MP_ERROR_TEXT("can't load with '%q' index"), vtype_to_qstr(vtype_index));
  1377. }
  1378. switch (vtype_base) {
  1379. case VTYPE_PTR8: {
  1380. // pointer to 8-bit memory
  1381. // TODO optimise to use thumb ldrb r1, [r2, r3]
  1382. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1383. ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
  1384. break;
  1385. }
  1386. case VTYPE_PTR16: {
  1387. // pointer to 16-bit memory
  1388. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1389. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1390. ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
  1391. break;
  1392. }
  1393. case VTYPE_PTR32: {
1394. // pointer to 32-bit memory
  1395. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1396. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1397. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1398. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1399. ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
  1400. break;
  1401. }
  1402. default:
  1403. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1404. MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
  1405. }
  1406. }
  1407. emit_post_push_reg(emit, VTYPE_INT, REG_RET);
  1408. }
  1409. }
  1410. STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
  1411. vtype_kind_t vtype;
  1412. if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
  1413. emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
  1414. } else {
  1415. emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
  1416. emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
  1417. }
  1418. emit_post(emit);
  1419. // check types
  1420. if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
  1421. // first time this local is assigned, so give it a type of the object stored in it
  1422. emit->local_vtype[local_num] = vtype;
  1423. } else if (emit->local_vtype[local_num] != vtype) {
  1424. // type of local is not the same as object stored in it
  1425. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1426. MP_ERROR_TEXT("local '%q' has type '%q' but source is '%q'"),
  1427. qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
  1428. }
  1429. }
  1430. STATIC void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
  1431. DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
  1432. need_reg_single(emit, REG_TEMP0, 0);
  1433. need_reg_single(emit, REG_TEMP1, 0);
  1434. emit_native_load_fast(emit, qst, local_num);
  1435. vtype_kind_t vtype;
  1436. int reg_base = REG_TEMP0;
  1437. emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
  1438. int reg_src = REG_TEMP1;
  1439. emit_pre_pop_reg_flexible(emit, &vtype, &reg_src, reg_base, reg_base);
  1440. ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
  1441. emit_post(emit);
  1442. }
  1443. STATIC void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
  1444. if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
  1445. emit_native_store_fast(emit, qst, local_num);
  1446. } else {
  1447. emit_native_store_deref(emit, qst, local_num);
  1448. }
  1449. }
  1450. STATIC void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
  1451. MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
  1452. MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
  1453. if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
  1454. // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
  1455. vtype_kind_t vtype;
  1456. emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
  1457. assert(vtype == VTYPE_PYOBJ);
  1458. } else {
  1459. vtype_kind_t vtype = peek_vtype(emit, 0);
  1460. if (vtype == VTYPE_PYOBJ) {
  1461. emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
  1462. } else {
  1463. emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
  1464. emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
  1465. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
  1466. }
  1467. }
  1468. emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
  1469. emit_post(emit);
  1470. }
  1471. STATIC void emit_native_store_attr(emit_t *emit, qstr qst) {
  1472. vtype_kind_t vtype_base;
  1473. vtype_kind_t vtype_val = peek_vtype(emit, 1);
  1474. if (vtype_val == VTYPE_PYOBJ) {
  1475. emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
  1476. } else {
  1477. emit_access_stack(emit, 2, &vtype_val, REG_ARG_1); // arg1 = value
  1478. emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_val, REG_ARG_2); // arg2 = type
  1479. ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_RET); // arg3 = value (converted)
  1480. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
  1481. adjust_stack(emit, -1); // pop value
  1482. }
  1483. assert(vtype_base == VTYPE_PYOBJ);
  1484. emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
  1485. emit_post(emit);
  1486. }
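// For example (illustrative viper source):
//     p = ptr8(addr)
//     p[0] = x
// becomes a single byte store through the pointer; non-object bases use the
// inline viper paths below, while object bases call the runtime subscript
// helper via MP_F_OBJ_SUBSCR.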
  1487. STATIC void emit_native_store_subscr(emit_t *emit) {
  1488. DEBUG_printf("store_subscr\n");
  1489. // need to compile: base[index] = value
  1490. // pop: index, base, value
  1491. // optimise case where index is an immediate
  1492. vtype_kind_t vtype_base = peek_vtype(emit, 1);
  1493. if (vtype_base == VTYPE_PYOBJ) {
  1494. // standard Python subscr
  1495. vtype_kind_t vtype_index = peek_vtype(emit, 0);
  1496. vtype_kind_t vtype_value = peek_vtype(emit, 2);
  1497. if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
  1498. // need to implicitly convert non-objects to objects
  1499. // TODO do this properly
  1500. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
  1501. adjust_stack(emit, 3);
  1502. }
  1503. emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
  1504. emit_call(emit, MP_F_OBJ_SUBSCR);
  1505. } else {
  1506. // viper store
  1507. // TODO The different machine architectures have very different
  1508. // capabilities and requirements for stores, so probably best to
  1509. // write a completely separate store-optimiser for each one.
  1510. stack_info_t *top = peek_stack(emit, 0);
  1511. if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
  1512. // index is an immediate
  1513. mp_int_t index_value = top->data.u_imm;
  1514. emit_pre_pop_discard(emit); // discard index
  1515. vtype_kind_t vtype_value;
  1516. int reg_base = REG_ARG_1;
  1517. int reg_index = REG_ARG_2;
  1518. int reg_value = REG_ARG_3;
  1519. emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_value);
  1520. #if N_X64 || N_X86
  1521. // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
  1522. emit_pre_pop_reg(emit, &vtype_value, reg_value);
  1523. #else
  1524. emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, reg_base, reg_index);
  1525. #endif
  1526. if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
  1527. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1528. MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
  1529. }
  1530. switch (vtype_base) {
  1531. case VTYPE_PTR8: {
  1532. // pointer to 8-bit memory
  1533. // TODO optimise to use thumb strb r1, [r2, r3]
  1534. if (index_value != 0) {
  1535. // index is non-zero
  1536. #if N_THUMB
  1537. if (index_value > 0 && index_value < 32) {
  1538. asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
  1539. break;
  1540. }
  1541. #endif
  1542. ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
  1543. #if N_ARM
  1544. asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
  1545. return;
  1546. #endif
  1547. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
  1548. reg_base = reg_index;
  1549. }
  1550. ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
  1551. break;
  1552. }
  1553. case VTYPE_PTR16: {
  1554. // pointer to 16-bit memory
  1555. if (index_value != 0) {
  1556. // index is a non-zero immediate
  1557. #if N_THUMB
  1558. if (index_value > 0 && index_value < 32) {
  1559. asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
  1560. break;
  1561. }
  1562. #endif
  1563. ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
  1564. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
  1565. reg_base = reg_index;
  1566. }
  1567. ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
  1568. break;
  1569. }
  1570. case VTYPE_PTR32: {
  1571. // pointer to 32-bit memory
  1572. if (index_value != 0) {
  1573. // index is a non-zero immediate
  1574. #if N_THUMB
  1575. if (index_value > 0 && index_value < 32) {
  1576. asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
  1577. break;
  1578. }
  1579. #endif
  1580. #if N_ARM
  1581. ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
  1582. asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
  1583. return;
  1584. #endif
  1585. ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
  1586. ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
  1587. reg_base = reg_index;
  1588. }
  1589. ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
  1590. break;
  1591. }
  1592. default:
  1593. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1594. MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
  1595. }
  1596. } else {
  1597. // index is not an immediate
  1598. vtype_kind_t vtype_index, vtype_value;
  1599. int reg_index = REG_ARG_2;
  1600. int reg_value = REG_ARG_3;
  1601. emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, reg_value);
  1602. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
  1603. if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
  1604. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1605. MP_ERROR_TEXT("can't store with '%q' index"), vtype_to_qstr(vtype_index));
  1606. }
  1607. #if N_X64 || N_X86
  1608. // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
  1609. emit_pre_pop_reg(emit, &vtype_value, reg_value);
  1610. #else
  1611. emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, REG_ARG_1, reg_index);
  1612. #endif
  1613. if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
  1614. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1615. MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
  1616. }
  1617. switch (vtype_base) {
  1618. case VTYPE_PTR8: {
  1619. // pointer to 8-bit memory
  1620. // TODO optimise to use thumb strb r1, [r2, r3]
  1621. #if N_ARM
  1622. asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
  1623. break;
  1624. #endif
  1625. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1626. ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
  1627. break;
  1628. }
  1629. case VTYPE_PTR16: {
  1630. // pointer to 16-bit memory
  1631. #if N_ARM
  1632. asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
  1633. break;
  1634. #endif
  1635. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1636. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1637. ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
  1638. break;
  1639. }
  1640. case VTYPE_PTR32: {
  1641. // pointer to 32-bit memory
  1642. #if N_ARM
  1643. asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
  1644. break;
  1645. #endif
  1646. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1647. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1648. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1649. ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
  1650. ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
  1651. break;
  1652. }
  1653. default:
  1654. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1655. MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
  1656. }
  1657. }
  1658. }
  1659. }
  1660. STATIC void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
  1661. if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1662. // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
  1663. // to mark deleted vars but then every var would need to be checked on
  1664. // each access. Very inefficient, so just set value to None to enable GC.
  1665. emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
  1666. emit_native_store_fast(emit, qst, local_num);
  1667. } else {
  1668. // TODO implement me!
  1669. }
  1670. }
  1671. STATIC void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
  1672. MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
  1673. MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
  1674. emit_native_pre(emit);
  1675. emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
  1676. emit_post(emit);
  1677. }
  1678. STATIC void emit_native_delete_attr(emit_t *emit, qstr qst) {
  1679. vtype_kind_t vtype_base;
  1680. emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
  1681. assert(vtype_base == VTYPE_PYOBJ);
  1682. ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
  1683. emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
  1684. emit_post(emit);
  1685. }
  1686. STATIC void emit_native_delete_subscr(emit_t *emit) {
  1687. vtype_kind_t vtype_index, vtype_base;
  1688. emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
  1689. assert(vtype_index == VTYPE_PYOBJ);
  1690. assert(vtype_base == VTYPE_PYOBJ);
  1691. emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
  1692. }
  1693. STATIC void emit_native_subscr(emit_t *emit, int kind) {
  1694. if (kind == MP_EMIT_SUBSCR_LOAD) {
  1695. emit_native_load_subscr(emit);
  1696. } else if (kind == MP_EMIT_SUBSCR_STORE) {
  1697. emit_native_store_subscr(emit);
  1698. } else {
  1699. emit_native_delete_subscr(emit);
  1700. }
  1701. }
  1702. STATIC void emit_native_attr(emit_t *emit, qstr qst, int kind) {
  1703. if (kind == MP_EMIT_ATTR_LOAD) {
  1704. emit_native_load_attr(emit, qst);
  1705. } else if (kind == MP_EMIT_ATTR_STORE) {
  1706. emit_native_store_attr(emit, qst);
  1707. } else {
  1708. emit_native_delete_attr(emit, qst);
  1709. }
  1710. }
  1711. STATIC void emit_native_dup_top(emit_t *emit) {
  1712. DEBUG_printf("dup_top\n");
  1713. vtype_kind_t vtype;
  1714. int reg = REG_TEMP0;
  1715. emit_pre_pop_reg_flexible(emit, &vtype, &reg, -1, -1);
  1716. emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
  1717. }
  1718. STATIC void emit_native_dup_top_two(emit_t *emit) {
  1719. vtype_kind_t vtype0, vtype1;
  1720. emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
  1721. emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
  1722. }
  1723. STATIC void emit_native_pop_top(emit_t *emit) {
  1724. DEBUG_printf("pop_top\n");
  1725. emit_pre_pop_discard(emit);
  1726. emit_post(emit);
  1727. }
  1728. STATIC void emit_native_rot_two(emit_t *emit) {
  1729. DEBUG_printf("rot_two\n");
  1730. vtype_kind_t vtype0, vtype1;
  1731. emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
  1732. emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
  1733. }
  1734. STATIC void emit_native_rot_three(emit_t *emit) {
  1735. DEBUG_printf("rot_three\n");
  1736. vtype_kind_t vtype0, vtype1, vtype2;
  1737. emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
  1738. emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
  1739. }
  1740. STATIC void emit_native_jump(emit_t *emit, mp_uint_t label) {
  1741. DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
  1742. emit_native_pre(emit);
  1743. // need to commit stack because we are jumping elsewhere
  1744. need_stack_settled(emit);
  1745. ASM_JUMP(emit->as, label);
  1746. emit_post(emit);
  1747. mp_asm_base_suppress_code(&emit->as->base);
  1748. }
  1749. STATIC void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
  1750. vtype_kind_t vtype = peek_vtype(emit, 0);
  1751. if (vtype == VTYPE_PYOBJ) {
  1752. emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
  1753. if (!pop) {
  1754. adjust_stack(emit, 1);
  1755. }
  1756. emit_call(emit, MP_F_OBJ_IS_TRUE);
  1757. } else {
  1758. emit_pre_pop_reg(emit, &vtype, REG_RET);
  1759. if (!pop) {
  1760. adjust_stack(emit, 1);
  1761. }
  1762. if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
  1763. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  1764. MP_ERROR_TEXT("can't implicitly convert '%q' to 'bool'"), vtype_to_qstr(vtype));
  1765. }
  1766. }
1767. // For the non-pop case we need to save the vtype so that
1768. // emit_native_adjust_stack_size can use it. This is a bit of a hack.
  1769. if (!pop) {
  1770. emit->saved_stack_vtype = vtype;
  1771. }
  1772. // need to commit stack because we may jump elsewhere
  1773. need_stack_settled(emit);
  1774. // Emit the jump
  1775. if (cond) {
  1776. ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
  1777. } else {
  1778. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
  1779. }
  1780. if (!pop) {
  1781. adjust_stack(emit, -1);
  1782. }
  1783. emit_post(emit);
  1784. }
  1785. STATIC void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
  1786. DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
  1787. emit_native_jump_helper(emit, cond, label, true);
  1788. }
  1789. STATIC void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
  1790. DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
  1791. emit_native_jump_helper(emit, cond, label, false);
  1792. }
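// Emit a jump (break/continue) that may leave one or more exception handlers.
// Active finally handlers between the current point and the target are
// chained together via their unwind_label fields; if there are none, the
// handler PC is simply restored to the enclosing handler before jumping.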
  1793. STATIC void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
  1794. if (except_depth > 0) {
  1795. exc_stack_entry_t *first_finally = NULL;
  1796. exc_stack_entry_t *prev_finally = NULL;
  1797. exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
  1798. for (; except_depth > 0; --except_depth, --e) {
  1799. if (e->is_finally && e->is_active) {
  1800. // Found an active finally handler
  1801. if (first_finally == NULL) {
  1802. first_finally = e;
  1803. }
  1804. if (prev_finally != NULL) {
  1805. // Mark prev finally as needed to unwind a jump
  1806. prev_finally->unwind_label = e->label;
  1807. }
  1808. prev_finally = e;
  1809. }
  1810. }
  1811. if (prev_finally == NULL) {
  1812. // No finally, handle the jump ourselves
  1813. // First, restore the exception handler address for the jump
  1814. if (e < emit->exc_stack) {
  1815. ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
  1816. } else {
  1817. ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
  1818. }
  1819. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
  1820. } else {
  1821. // Last finally should do our jump for us
  1822. // Mark finally as needing to decide the type of jump
  1823. prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
  1824. ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
  1825. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
  1826. // Cancel any active exception (see also emit_native_pop_except_jump)
  1827. ASM_MOV_REG_IMM(emit->as, REG_RET, (mp_uint_t)MP_OBJ_NULL);
  1828. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
  1829. // Jump to the innermost active finally
  1830. label = first_finally->label;
  1831. }
  1832. }
  1833. emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
  1834. }
  1835. STATIC void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
  1836. // the context manager is on the top of the stack
  1837. // stack: (..., ctx_mgr)
  1838. // get __exit__ method
  1839. vtype_kind_t vtype;
  1840. emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
  1841. assert(vtype == VTYPE_PYOBJ);
  1842. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
  1843. emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
  1844. // stack: (..., ctx_mgr, __exit__, self)
  1845. emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
  1846. emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
  1847. emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
  1848. emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
  1849. emit_post_push_reg(emit, vtype, REG_ARG_3); // self
  1850. // stack: (..., __exit__, self)
  1851. // REG_ARG_1=ctx_mgr
  1852. // get __enter__ method
  1853. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
  1854. emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
  1855. // stack: (..., __exit__, self, __enter__, self)
  1856. // call __enter__ method
  1857. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
  1858. emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
  1859. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
  1860. // stack: (..., __exit__, self, as_value)
  1861. // need to commit stack because we may jump elsewhere
  1862. need_stack_settled(emit);
  1863. emit_native_push_exc_stack(emit, label, true);
  1864. emit_native_dup_top(emit);
  1865. // stack: (..., __exit__, self, as_value, as_value)
  1866. }
  1867. STATIC void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
  1868. if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
  1869. emit_native_setup_with(emit, label);
  1870. } else {
  1871. // Set up except and finally
  1872. emit_native_pre(emit);
  1873. need_stack_settled(emit);
  1874. emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
  1875. emit_post(emit);
  1876. }
  1877. }
  1878. STATIC void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
  1879. // Note: 3 labels are reserved for this function, starting at *emit->label_slot
  1880. // stack: (..., __exit__, self, as_value)
  1881. emit_native_pre(emit);
  1882. emit_native_leave_exc_stack(emit, false);
  1883. adjust_stack(emit, -1);
  1884. // stack: (..., __exit__, self)
  1885. // Label for case where __exit__ is called from an unwind jump
  1886. emit_native_label_assign(emit, *emit->label_slot + 2);
  1887. // call __exit__
  1888. emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
  1889. emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
  1890. emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
  1891. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
  1892. emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
1893. // Replace exc with MP_OBJ_NULL and finish
  1894. emit_native_jump(emit, *emit->label_slot);
  1895. // nlr_catch
  1896. // Don't use emit_native_label_assign because this isn't a real finally label
  1897. mp_asm_base_label_assign(&emit->as->base, label);
  1898. // Leave with's exception handler
  1899. emit_native_leave_exc_stack(emit, true);
  1900. // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
  1901. emit_native_adjust_stack_size(emit, 2);
  1902. // stack: (..., __exit__, self)
  1903. ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc
  1904. // Check if exc is MP_OBJ_NULL (i.e. zero) and jump to non-exc handler if it is
  1905. ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, *emit->label_slot + 2, false);
  1906. ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
  1907. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
  1908. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
  1909. emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
  1910. // Stack: (..., __exit__, self, type(exc), exc, traceback)
  1911. // call __exit__ method
  1912. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
  1913. emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
  1914. // Stack: (...)
  1915. // If REG_RET is true then we need to replace exception with None (swallow exception)
  1916. if (REG_ARG_1 != REG_RET) {
  1917. ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
  1918. }
  1919. emit_call(emit, MP_F_OBJ_IS_TRUE);
  1920. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
  1921. // Replace exception with MP_OBJ_NULL.
  1922. emit_native_label_assign(emit, *emit->label_slot);
  1923. ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
  1924. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
  1925. // end of with cleanup nlr_catch block
  1926. emit_native_label_assign(emit, *emit->label_slot + 1);
  1927. // Exception is in nlr_buf.ret_val slot
  1928. }
  1929. STATIC void emit_native_end_finally(emit_t *emit) {
  1930. // logic:
  1931. // exc = pop_stack
  1932. // if exc == None: pass
  1933. // else: raise exc
  1934. // the check if exc is None is done in the MP_F_NATIVE_RAISE stub
  1935. emit_native_pre(emit);
  1936. ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
  1937. emit_call(emit, MP_F_NATIVE_RAISE);
  1938. // Get state for this finally and see if we need to unwind
  1939. exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
  1940. if (e->unwind_label != UNWIND_LABEL_UNUSED) {
  1941. ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
  1942. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
  1943. if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
  1944. ASM_JUMP_REG(emit->as, REG_RET);
  1945. } else {
  1946. emit_native_jump(emit, e->unwind_label);
  1947. }
  1948. emit_native_label_assign(emit, *emit->label_slot);
  1949. }
  1950. emit_post(emit);
  1951. }
  1952. STATIC void emit_native_get_iter(emit_t *emit, bool use_stack) {
  1953. // perhaps the difficult one, as we want to rewrite for loops using native code
  1954. // in cases where we iterate over a Python object, can we use normal runtime calls?
  1955. vtype_kind_t vtype;
  1956. emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
  1957. assert(vtype == VTYPE_PYOBJ);
  1958. if (use_stack) {
  1959. emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
  1960. emit_call(emit, MP_F_NATIVE_GETITER);
  1961. } else {
  1962. // mp_getiter will allocate the iter_buf on the heap
  1963. ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
  1964. emit_call(emit, MP_F_NATIVE_GETITER);
  1965. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1966. }
  1967. }
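// Emit one iteration step of a for loop: pass the stack-allocated iterator
// slots to the native iternext helper and jump to the end label when it
// returns MP_OBJ_STOP_ITERATION; otherwise push the next value.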
  1968. STATIC void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
  1969. emit_native_pre(emit);
  1970. emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
  1971. adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
  1972. emit_call(emit, MP_F_NATIVE_ITERNEXT);
  1973. #if MICROPY_DEBUG_MP_OBJ_SENTINELS
  1974. ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
  1975. ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
  1976. #else
  1977. MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
  1978. ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
  1979. #endif
  1980. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  1981. }
  1982. STATIC void emit_native_for_iter_end(emit_t *emit) {
  1983. // adjust stack counter (we get here from for_iter ending, which popped the value for us)
  1984. emit_native_pre(emit);
  1985. adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
  1986. emit_post(emit);
  1987. }
  1988. STATIC void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
  1989. if (within_exc_handler) {
  1990. // Cancel any active exception so subsequent handlers don't see it
  1991. ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
  1992. ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
  1993. } else {
  1994. emit_native_leave_exc_stack(emit, false);
  1995. }
  1996. emit_native_jump(emit, label);
  1997. }
  1998. STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
  1999. vtype_kind_t vtype;
  2000. emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
  2001. if (vtype == VTYPE_PYOBJ) {
  2002. emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
  2003. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  2004. } else {
  2005. adjust_stack(emit, 1);
  2006. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  2007. MP_ERROR_TEXT("unary op %q not implemented"), mp_unary_op_method_name[op]);
  2008. }
  2009. }
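// Binary operations: for viper int/uint operands the common arithmetic,
// bitwise, shift and comparison ops are emitted inline (with floor-divide and
// modulo dispatched to helper functions); for object operands the runtime
// binary-op helper is called, with "not in"/"is not" mapped to the inverted
// base operation.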
  2010. STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
  2011. DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
  2012. vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
  2013. vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
  2014. if ((vtype_lhs == VTYPE_INT || vtype_lhs == VTYPE_UINT)
  2015. && (vtype_rhs == VTYPE_INT || vtype_rhs == VTYPE_UINT)) {
  2016. // for integers, inplace and normal ops are equivalent, so use just normal ops
  2017. if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
  2018. op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
  2019. }
  2020. #if N_X64 || N_X86
  2021. // special cases for x86 and shifting
  2022. if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
  2023. #if N_X64
  2024. emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
  2025. #else
  2026. emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
  2027. #endif
  2028. if (op == MP_BINARY_OP_LSHIFT) {
  2029. ASM_LSL_REG(emit->as, REG_RET);
  2030. } else {
  2031. if (vtype_lhs == VTYPE_UINT) {
  2032. ASM_LSR_REG(emit->as, REG_RET);
  2033. } else {
  2034. ASM_ASR_REG(emit->as, REG_RET);
  2035. }
  2036. }
  2037. emit_post_push_reg(emit, vtype_lhs, REG_RET);
  2038. return;
  2039. }
  2040. #endif
2041. // special cases for floor-divide and modulo because we dispatch to helper functions
  2042. if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
  2043. emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
  2044. if (vtype_lhs != VTYPE_INT) {
  2045. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  2046. MP_ERROR_TEXT("div/mod not implemented for uint"), mp_binary_op_method_name[op]);
  2047. }
  2048. if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
  2049. emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
  2050. } else {
  2051. emit_call(emit, MP_F_SMALL_INT_MODULO);
  2052. }
  2053. emit_post_push_reg(emit, VTYPE_INT, REG_RET);
  2054. return;
  2055. }
  2056. int reg_rhs = REG_ARG_3;
  2057. emit_pre_pop_reg_flexible(emit, &vtype_rhs, &reg_rhs, REG_RET, REG_ARG_2);
  2058. emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
  2059. #if !(N_X64 || N_X86)
  2060. if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
  2061. if (op == MP_BINARY_OP_LSHIFT) {
  2062. ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2063. } else {
  2064. if (vtype_lhs == VTYPE_UINT) {
  2065. ASM_LSR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2066. } else {
  2067. ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2068. }
  2069. }
  2070. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2071. return;
  2072. }
  2073. #endif
  2074. if (op == MP_BINARY_OP_OR) {
  2075. ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2076. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2077. } else if (op == MP_BINARY_OP_XOR) {
  2078. ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2079. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2080. } else if (op == MP_BINARY_OP_AND) {
  2081. ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2082. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2083. } else if (op == MP_BINARY_OP_ADD) {
  2084. ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2085. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2086. } else if (op == MP_BINARY_OP_SUBTRACT) {
  2087. ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2088. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2089. } else if (op == MP_BINARY_OP_MULTIPLY) {
  2090. ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
  2091. emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
  2092. } else if (op == MP_BINARY_OP_LESS
  2093. || op == MP_BINARY_OP_MORE
  2094. || op == MP_BINARY_OP_EQUAL
  2095. || op == MP_BINARY_OP_LESS_EQUAL
  2096. || op == MP_BINARY_OP_MORE_EQUAL
  2097. || op == MP_BINARY_OP_NOT_EQUAL) {
  2098. // comparison ops
  2099. if (vtype_lhs != vtype_rhs) {
  2100. EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("comparison of int and uint"));
  2101. }
  2102. size_t op_idx = op - MP_BINARY_OP_LESS + (vtype_lhs == VTYPE_UINT ? 0 : 6);
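// The condition-code tables below hold 6 unsigned entries followed by 6
// signed entries, indexed by (op - MP_BINARY_OP_LESS) plus the offset above.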
  2103. need_reg_single(emit, REG_RET, 0);
  2104. #if N_X64
  2105. asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
  2106. asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
  2107. static byte ops[6 + 6] = {
  2108. // unsigned
  2109. ASM_X64_CC_JB,
  2110. ASM_X64_CC_JA,
  2111. ASM_X64_CC_JE,
  2112. ASM_X64_CC_JBE,
  2113. ASM_X64_CC_JAE,
  2114. ASM_X64_CC_JNE,
  2115. // signed
  2116. ASM_X64_CC_JL,
  2117. ASM_X64_CC_JG,
  2118. ASM_X64_CC_JE,
  2119. ASM_X64_CC_JLE,
  2120. ASM_X64_CC_JGE,
  2121. ASM_X64_CC_JNE,
  2122. };
  2123. asm_x64_setcc_r8(emit->as, ops[op_idx], REG_RET);
  2124. #elif N_X86
  2125. asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
  2126. asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
  2127. static byte ops[6 + 6] = {
  2128. // unsigned
  2129. ASM_X86_CC_JB,
  2130. ASM_X86_CC_JA,
  2131. ASM_X86_CC_JE,
  2132. ASM_X86_CC_JBE,
  2133. ASM_X86_CC_JAE,
  2134. ASM_X86_CC_JNE,
  2135. // signed
  2136. ASM_X86_CC_JL,
  2137. ASM_X86_CC_JG,
  2138. ASM_X86_CC_JE,
  2139. ASM_X86_CC_JLE,
  2140. ASM_X86_CC_JGE,
  2141. ASM_X86_CC_JNE,
  2142. };
  2143. asm_x86_setcc_r8(emit->as, ops[op_idx], REG_RET);
  2144. #elif N_THUMB
  2145. asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
  2146. if (asm_thumb_allow_armv7m(emit->as)) {
  2147. static uint16_t ops[6 + 6] = {
  2148. // unsigned
  2149. ASM_THUMB_OP_ITE_CC,
  2150. ASM_THUMB_OP_ITE_HI,
  2151. ASM_THUMB_OP_ITE_EQ,
  2152. ASM_THUMB_OP_ITE_LS,
  2153. ASM_THUMB_OP_ITE_CS,
  2154. ASM_THUMB_OP_ITE_NE,
  2155. // signed
  2156. ASM_THUMB_OP_ITE_LT,
  2157. ASM_THUMB_OP_ITE_GT,
  2158. ASM_THUMB_OP_ITE_EQ,
  2159. ASM_THUMB_OP_ITE_LE,
  2160. ASM_THUMB_OP_ITE_GE,
  2161. ASM_THUMB_OP_ITE_NE,
  2162. };
  2163. asm_thumb_op16(emit->as, ops[op_idx]);
  2164. asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
  2165. asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
  2166. } else {
  2167. static uint16_t ops[6 + 6] = {
  2168. // unsigned
  2169. ASM_THUMB_CC_CC,
  2170. ASM_THUMB_CC_HI,
  2171. ASM_THUMB_CC_EQ,
  2172. ASM_THUMB_CC_LS,
  2173. ASM_THUMB_CC_CS,
  2174. ASM_THUMB_CC_NE,
  2175. // signed
  2176. ASM_THUMB_CC_LT,
  2177. ASM_THUMB_CC_GT,
  2178. ASM_THUMB_CC_EQ,
  2179. ASM_THUMB_CC_LE,
  2180. ASM_THUMB_CC_GE,
  2181. ASM_THUMB_CC_NE,
  2182. };
  2183. asm_thumb_bcc_rel9(emit->as, ops[op_idx], 6);
  2184. asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
  2185. asm_thumb_b_rel12(emit->as, 4);
  2186. asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
  2187. }
  2188. #elif N_ARM
  2189. asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
  2190. static uint ccs[6 + 6] = {
  2191. // unsigned
  2192. ASM_ARM_CC_CC,
  2193. ASM_ARM_CC_HI,
  2194. ASM_ARM_CC_EQ,
  2195. ASM_ARM_CC_LS,
  2196. ASM_ARM_CC_CS,
  2197. ASM_ARM_CC_NE,
  2198. // signed
  2199. ASM_ARM_CC_LT,
  2200. ASM_ARM_CC_GT,
  2201. ASM_ARM_CC_EQ,
  2202. ASM_ARM_CC_LE,
  2203. ASM_ARM_CC_GE,
  2204. ASM_ARM_CC_NE,
  2205. };
  2206. asm_arm_setcc_reg(emit->as, REG_RET, ccs[op_idx]);
  2207. #elif N_XTENSA || N_XTENSAWIN
  2208. static uint8_t ccs[6 + 6] = {
  2209. // unsigned
  2210. ASM_XTENSA_CC_LTU,
  2211. 0x80 | ASM_XTENSA_CC_LTU, // for GTU we'll swap args
  2212. ASM_XTENSA_CC_EQ,
  2213. 0x80 | ASM_XTENSA_CC_GEU, // for LEU we'll swap args
  2214. ASM_XTENSA_CC_GEU,
  2215. ASM_XTENSA_CC_NE,
  2216. // signed
  2217. ASM_XTENSA_CC_LT,
  2218. 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
  2219. ASM_XTENSA_CC_EQ,
  2220. 0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
  2221. ASM_XTENSA_CC_GE,
  2222. ASM_XTENSA_CC_NE,
  2223. };
  2224. uint8_t cc = ccs[op_idx];
  2225. if ((cc & 0x80) == 0) {
  2226. asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
  2227. } else {
  2228. asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
  2229. }
  2230. #else
  2231. #error not implemented
  2232. #endif
  2233. emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
  2234. } else {
  2235. // TODO other ops not yet implemented
  2236. adjust_stack(emit, 1);
  2237. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  2238. MP_ERROR_TEXT("binary op %q not implemented"), mp_binary_op_method_name[op]);
  2239. }
  2240. } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
  2241. emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
  2242. bool invert = false;
  2243. if (op == MP_BINARY_OP_NOT_IN) {
  2244. invert = true;
  2245. op = MP_BINARY_OP_IN;
  2246. } else if (op == MP_BINARY_OP_IS_NOT) {
  2247. invert = true;
  2248. op = MP_BINARY_OP_IS;
  2249. }
  2250. emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
  2251. if (invert) {
  2252. ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
  2253. emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
  2254. }
  2255. emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
  2256. } else {
  2257. adjust_stack(emit, -1);
  2258. EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
  2259. MP_ERROR_TEXT("can't do binary op between '%q' and '%q'"),
  2260. vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
  2261. }
  2262. }
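
// For illustration only (not part of the emitter): a viper function along the
// lines of
//
//     @micropython.viper
//     def f(x: int, y: int) -> int:
//         if x < y:
//             return 1
//         return 0
//
// exercises the signed-comparison path above; the comparison leaves a
// VTYPE_BOOL result in REG_RET, which the subsequent conditional jump consumes.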

#if MICROPY_PY_BUILTINS_SLICE
STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
#endif
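
// Build a tuple/list/map/set (or slice) by calling the corresponding runtime
// helper.  The kind is translated to a runtime function by offsetting from
// MP_F_BUILD_TUPLE, which the static asserts below check is valid.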
STATIC void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
    // for viper: call runtime, with types of args
    //   if wrapped in byte_array, or something, allocates memory and fills it
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
    MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
    #if MICROPY_PY_BUILTINS_SLICE
    if (kind == MP_EMIT_BUILD_SLICE) {
        emit_native_build_slice(emit, n_args);
        return;
    }
    #endif
    emit_native_pre(emit);
    if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
    }
    emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
}
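
// Store a key/value pair into a map that is on the stack: pops the key, value
// and map (all must be Python objects), calls the MP_F_STORE_MAP helper, and
// pushes the map back for further stores.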
STATIC void emit_native_store_map(emit_t *emit) {
    vtype_kind_t vtype_key, vtype_value, vtype_map;
    emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
    assert(vtype_key == VTYPE_PYOBJ);
    assert(vtype_value == VTYPE_PYOBJ);
    assert(vtype_map == VTYPE_PYOBJ);
    emit_call(emit, MP_F_STORE_MAP);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
}
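
// Build a slice object from 2 (start, stop) or 3 (start, stop, step) Python
// objects on the stack; with 2 args the step defaults to the const none object.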
#if MICROPY_PY_BUILTINS_SLICE
STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
    DEBUG_printf("build_slice %d\n", n_args);
    if (n_args == 2) {
        vtype_kind_t vtype_start, vtype_stop;
        emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
        assert(vtype_start == VTYPE_PYOBJ);
        assert(vtype_stop == VTYPE_PYOBJ);
        emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
    } else {
        assert(n_args == 3);
        vtype_kind_t vtype_start, vtype_stop, vtype_step;
        emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
        assert(vtype_start == VTYPE_PYOBJ);
        assert(vtype_stop == VTYPE_PYOBJ);
        assert(vtype_step == VTYPE_PYOBJ);
    }
    emit_call(emit, MP_F_NEW_SLICE);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
#endif
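
// Store one item into the collection being built by a comprehension: the
// runtime helper is chosen from the scope kind (list append, set store or map
// store) and the target collection is read from the stack at collection_index.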
STATIC void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
    mp_fun_kind_t f;
    if (kind == SCOPE_LIST_COMP) {
        vtype_kind_t vtype_item;
        emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
        assert(vtype_item == VTYPE_PYOBJ);
        f = MP_F_LIST_APPEND;
    #if MICROPY_PY_BUILTINS_SET
    } else if (kind == SCOPE_SET_COMP) {
        vtype_kind_t vtype_item;
        emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
        assert(vtype_item == VTYPE_PYOBJ);
        f = MP_F_STORE_SET;
    #endif
    } else {
        // SCOPE_DICT_COMP
        vtype_kind_t vtype_key, vtype_value;
        emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
        assert(vtype_key == VTYPE_PYOBJ);
        assert(vtype_value == VTYPE_PYOBJ);
        f = MP_F_STORE_MAP;
    }
    vtype_kind_t vtype_collection;
    emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
    assert(vtype_collection == VTYPE_PYOBJ);
    emit_call(emit, f);
    emit_post(emit);
}
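
// Unpack a sequence of known length: pops the sequence, reserves n_args slots
// on the value stack as the destination and calls the runtime unpack helper.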
STATIC void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
    DEBUG_printf("unpack_sequence %d\n", n_args);
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
}

STATIC void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
    DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
}
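
// Create a function object: REG_ARG_2 is loaded with the context taken from
// the enclosing function object, REG_ARG_3 with a pointer to the default
// arguments on the stack (or 0 if there are none), and REG_ARG_1 with the
// scope's child raw code, before calling the make-function runtime helper.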
STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
    // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
    emit_native_pre(emit);
    emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
    if (n_pos_defaults == 0 && n_kw_defaults == 0) {
        need_reg_all(emit);
        ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
    } else {
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2);
        need_reg_all(emit);
    }
    emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
    ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
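
// As emit_native_make_function, but the resulting function is then wrapped by
// the MP_F_NEW_CLOSURE helper together with the n_closed_over values popped
// from the stack.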
STATIC void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
    // make function
    emit_native_pre(emit);
    emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
    if (n_pos_defaults == 0 && n_kw_defaults == 0) {
        need_reg_all(emit);
        ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
    } else {
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_closed_over);
        adjust_stack(emit, 2 + n_closed_over);
        need_reg_all(emit);
    }
    emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
    ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);

    // make closure
    #if REG_ARG_1 != REG_RET
    ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
    #endif
    ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
    if (n_pos_defaults != 0 || n_kw_defaults != 0) {
        adjust_stack(emit, -2);
    }
    ASM_CALL_IND(emit->as, MP_F_NEW_CLOSURE);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
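
// Call a function.  In viper code a call may actually be a cast such as
// int(x), which is detected via VTYPE_BUILTIN_CAST and lowered to a type
// conversion instead of a real call; otherwise the runtime call helpers are
// used, with a variant for calls that have *args/**kwargs (star_flags).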
STATIC void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
    DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);

    // TODO: in viper mode, call special runtime routine with type info for args,
    // and wanted type info for return, to remove need for boxing/unboxing
    emit_native_pre(emit);
    vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
    if (vtype_fun == VTYPE_BUILTIN_CAST) {
        // casting operator
        assert(n_positional == 1 && n_keyword == 0);
        assert(!star_flags);
        DEBUG_printf(" cast to %d\n", vtype_fun);
        vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
        switch (peek_vtype(emit, 0)) {
            case VTYPE_PYOBJ: {
                vtype_kind_t vtype;
                emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
                emit_pre_pop_discard(emit);
                emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
                emit_post_push_reg(emit, vtype_cast, REG_RET);
                break;
            }
            case VTYPE_BOOL:
            case VTYPE_INT:
            case VTYPE_UINT:
            case VTYPE_PTR:
            case VTYPE_PTR8:
            case VTYPE_PTR16:
            case VTYPE_PTR32:
            case VTYPE_PTR_NONE:
                emit_fold_stack_top(emit, REG_ARG_1);
                emit_post_top_set_vtype(emit, vtype_cast);
                break;
            default:
                // this can happen when casting a cast: int(int)
                mp_raise_NotImplementedError(MP_ERROR_TEXT("casting"));
        }
    } else {
        assert(vtype_fun == VTYPE_PYOBJ);
        if (star_flags) {
            emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 2); // pointer to args
            emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
            emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
        } else {
            if (n_positional != 0 || n_keyword != 0) {
                emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
            }
            emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
            emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
            emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
        }
    }
}
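
// Call a method: the stack holds the method, self and then the arguments, and
// everything is passed to the runtime call-method helper; star_flags selects
// the variant that also handles *args/**kwargs.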
STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
    if (star_flags) {
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
        emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    } else {
        emit_native_pre(emit);
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
        emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}
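
// Return from the function.  Generators record the stack position of the
// return value and MP_VM_RETURN_NORMAL in their state and unwind-jump to the
// exit handler; viper functions check the value against the declared return
// type and box native values back to a Python object; otherwise a Python
// object is expected in REG_PARENT_RET.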
STATIC void emit_native_return_value(emit_t *emit) {
    DEBUG_printf("return_value\n");

    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
        // Save pointer to current stack position for caller to access return value
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
        emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);

        // Put return type in return value slot
        ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);

        // Do the unwinding jump to get to the return handler
        emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
        return;
    }

    if (emit->do_viper_types) {
        vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
        if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
            emit_pre_pop_discard(emit);
            if (return_vtype == VTYPE_PYOBJ) {
                emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
            } else {
                ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
            }
        } else {
            vtype_kind_t vtype;
            emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
            if (vtype != return_vtype) {
                EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
                    MP_ERROR_TEXT("return expected '%q' but got '%q'"),
                    vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
            }
        }
        if (return_vtype != VTYPE_PYOBJ) {
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
            #if REG_RET != REG_PARENT_RET
            ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
            #endif
        }
    } else {
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
        assert(vtype == VTYPE_PYOBJ);
    }
    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        // Save return value for the global exception handler to use
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
    }
    emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
}

STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
    (void)n_args;
    assert(n_args == 1);
    vtype_kind_t vtype_exc;
    emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
    if (vtype_exc != VTYPE_PYOBJ) {
        EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("must raise an object"));
    }
    // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
    emit_call(emit, MP_F_NATIVE_RAISE);
    mp_asm_base_suppress_code(&emit->as->base);
}
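
// Emit a yield or yield-from.  The current stack pointer and
// MP_VM_RETURN_YIELD are stored in the generator state, the re-entry address
// is saved via a PC-relative load into LOCAL_IDX_GEN_PC, and control jumps to
// the exit handler; on re-entry the innermost active exception handler (if
// any) is re-installed before execution continues.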
STATIC void emit_native_yield(emit_t *emit, int kind) {
    // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot

    if (emit->do_viper_types) {
        mp_raise_NotImplementedError(MP_ERROR_TEXT("native yield"));
    }
    emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;

    need_stack_settled(emit);

    if (kind == MP_EMIT_YIELD_FROM) {
        // Top of yield-from loop, conceptually implementing:
        //     for item in generator:
        //         yield item

        // Jump to start of loop
        emit_native_jump(emit, *emit->label_slot + 2);

        // Label for top of loop
        emit_native_label_assign(emit, *emit->label_slot + 1);
    }

    // Save pointer to current stack position for caller to access yielded value
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
    emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);

    // Put return type in return value slot
    ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);

    // Save re-entry PC
    ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
    emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);

    // Jump to exit handler
    ASM_JUMP(emit->as, emit->exit_label);

    // Label re-entry point
    mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);

    // Re-open any active exception handler
    if (emit->exc_stack_size > 0) {
        // Find innermost active exception handler, to restore as current handler
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        for (; e >= emit->exc_stack; --e) {
            if (e->is_active) {
                // Found active handler, get its PC
                ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
                ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
                break;
            }
        }
    }

    emit_native_adjust_stack_size(emit, 1); // send_value

    if (kind == MP_EMIT_YIELD_VALUE) {
        // Check LOCAL_IDX_EXC_VAL for any injected value
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
        emit_call(emit, MP_F_NATIVE_RAISE);
    } else {
        // Label loop entry
        emit_native_label_assign(emit, *emit->label_slot + 2);

        // Get the next item from the delegate generator
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
        emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_EXC_VAL(emit)); // throw_value
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
        emit_call(emit, MP_F_NATIVE_YIELD_FROM);

        // If returned non-zero then generator continues
        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);

        // Pop exhausted gen, replace with ret_value
        emit_native_adjust_stack_size(emit, 1); // ret_value
        emit_fold_stack_top(emit, REG_ARG_1);
    }
}

STATIC void emit_native_start_except_handler(emit_t *emit) {
    // Protected block has finished so leave the current exception handler
    emit_native_leave_exc_stack(emit, true);

    // Get and push nlr_buf.ret_val
    ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
}

STATIC void emit_native_end_except_handler(emit_t *emit) {
    adjust_stack(emit, -1); // pop the exception (end_finally didn't use it)
}
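
// The emitter's method table: this is what the compiler uses to drive native
// code generation; the entries must stay in the order defined by
// emit_method_table_t.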
const emit_method_table_t EXPORT_FUN(method_table) = {
    #if MICROPY_DYNAMIC_COMPILER
    EXPORT_FUN(new),
    EXPORT_FUN(free),
    #endif

    emit_native_start_pass,
    emit_native_end_pass,
    emit_native_adjust_stack_size,
    emit_native_set_source_line,

    {
        emit_native_load_local,
        emit_native_load_global,
    },
    {
        emit_native_store_local,
        emit_native_store_global,
    },
    {
        emit_native_delete_local,
        emit_native_delete_global,
    },

    emit_native_label_assign,
    emit_native_import,
    emit_native_load_const_tok,
    emit_native_load_const_small_int,
    emit_native_load_const_str,
    emit_native_load_const_obj,
    emit_native_load_null,
    emit_native_load_method,
    emit_native_load_build_class,
    emit_native_subscr,
    emit_native_attr,
    emit_native_dup_top,
    emit_native_dup_top_two,
    emit_native_pop_top,
    emit_native_rot_two,
    emit_native_rot_three,
    emit_native_jump,
    emit_native_pop_jump_if,
    emit_native_jump_if_or_pop,
    emit_native_unwind_jump,
    emit_native_setup_block,
    emit_native_with_cleanup,
    emit_native_end_finally,
    emit_native_get_iter,
    emit_native_for_iter,
    emit_native_for_iter_end,
    emit_native_pop_except_jump,
    emit_native_unary_op,
    emit_native_binary_op,
    emit_native_build,
    emit_native_store_map,
    emit_native_store_comp,
    emit_native_unpack_sequence,
    emit_native_unpack_ex,
    emit_native_make_function,
    emit_native_make_closure,
    emit_native_call_function,
    emit_native_call_method,
    emit_native_return_value,
    emit_native_raise_varargs,
    emit_native_yield,
    emit_native_start_except_handler,
    emit_native_end_except_handler,
};

#endif