aot_llvm.c

/*
 * Copyright (C) 2019 Intel Corporation. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 */

#include "aot_llvm.h"
#include "aot_llvm_extra2.h"
#include "aot_compiler.h"
#include "aot_emit_exception.h"
#include "aot_emit_table.h"
#include "../aot/aot_runtime.h"
#include "../aot/aot_intrinsic.h"

#if WASM_ENABLE_DEBUG_AOT != 0
#include "debug/dwarf_extractor.h"
#endif

static bool
create_native_symbol(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx);
static bool
create_native_stack_bound(const AOTCompContext *comp_ctx,
                          AOTFuncContext *func_ctx);
static bool
create_native_stack_top_min(const AOTCompContext *comp_ctx,
                            AOTFuncContext *func_ctx);

LLVMTypeRef
wasm_type_to_llvm_type(const AOTLLVMTypes *llvm_types, uint8 wasm_type)
{
    switch (wasm_type) {
        case VALUE_TYPE_I32:
        case VALUE_TYPE_FUNCREF:
        case VALUE_TYPE_EXTERNREF:
            return llvm_types->int32_type;
        case VALUE_TYPE_I64:
            return llvm_types->int64_type;
        case VALUE_TYPE_F32:
            return llvm_types->float32_type;
        case VALUE_TYPE_F64:
            return llvm_types->float64_type;
        case VALUE_TYPE_V128:
            return llvm_types->i64x2_vec_type;
        case VALUE_TYPE_VOID:
            return llvm_types->void_type;
        default:
            break;
    }
    return NULL;
}

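/*
 * Note that VALUE_TYPE_FUNCREF and VALUE_TYPE_EXTERNREF map to int32_type
 * above: at this level reference values are represented as 32-bit indices
 * rather than native pointers.
 */
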
static LLVMValueRef
aot_add_llvm_func1(const AOTCompContext *comp_ctx, LLVMModuleRef module,
                   uint32 func_index, uint32 param_count, LLVMTypeRef func_type,
                   const char *prefix)
{
    char func_name[48];
    LLVMValueRef func;
    LLVMValueRef local_value;
    uint32 i, j;

    /* Add LLVM function */
    snprintf(func_name, sizeof(func_name), "%s%d", prefix, func_index);
    if (!(func = LLVMAddFunction(module, func_name, func_type))) {
        aot_set_last_error("add LLVM function failed.");
        return NULL;
    }

    j = 0;
    local_value = LLVMGetParam(func, j++);
    LLVMSetValueName(local_value, "exec_env");

    /* Set parameter names */
    for (i = 0; i < param_count; i++) {
        local_value = LLVMGetParam(func, j++);
        LLVMSetValueName(local_value, "");
    }
    return func;
}

/*
 * Create a basic func_ctx that is sufficient to call aot_emit_exception.
 *
 * That is:
 * - exec_env
 * - aot_inst
 * - native_symbol (if is_indirect_mode)
 */
static bool
create_basic_func_context(const AOTCompContext *comp_ctx,
                          AOTFuncContext *func_ctx)
{
    LLVMValueRef aot_inst_offset = I32_TWO, aot_inst_addr;

    /* Save the parameters for fast access */
    func_ctx->exec_env = LLVMGetParam(func_ctx->func, 0);

    /* Get aot inst address, the layout of exec_env is:
       exec_env->next, exec_env->prev, exec_env->module_inst, and argv_buf */
    if (!(aot_inst_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &aot_inst_offset, 1, "aot_inst_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        goto fail;
    }

    /* Load aot inst */
    if (!(func_ctx->aot_inst = LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE,
                                              aot_inst_addr, "aot_inst"))) {
        aot_set_last_error("llvm build load failed");
        goto fail;
    }

    if (comp_ctx->is_indirect_mode
        && !create_native_symbol(comp_ctx, func_ctx)) {
        goto fail;
    }

    return true;
fail:
    return false;
}

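/*
 * The helpers in this file address exec_env as an array of pointer-sized
 * slots (LLVMBuildInBoundsGEP2 with OPQ_PTR_TYPE and a small constant
 * index). The slot indices used below, inferred from how each loaded
 * value is named and used, are:
 *   I32_TWO   -> aot_inst (module_inst)
 *   I32_THREE -> argv_buf
 *   I32_FOUR  -> native_stack_bound
 *   I32_SIX   -> aux_stack_bound
 *   I32_SEVEN -> aux_stack_bottom
 *   I32_EIGHT -> native_symbol
 *   I32_NINE  -> native_stack_top_min
 * They must stay in sync with the runtime's WASMExecEnv field layout.
 */
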
/*
 * Return whether the "precheck" wrapper function can use tail call
 * optimization.
 */
bool
aot_target_precheck_can_use_musttail(const AOTCompContext *comp_ctx)
{
    if (!strcmp(comp_ctx->target_arch, "xtensa")) {
        /*
         * The xtensa windowed ABI doesn't have tail call optimization.
         *
         * Note: as of writing this, the xtensa version of LLVM
         * simply ignores the musttail attribute.
         * https://github.com/espressif/llvm-project/pull/73
         */
        return false;
    }
    if (!strcmp(comp_ctx->target_arch, "riscv32")
        || !strcmp(comp_ctx->target_arch, "riscv64")) {
        /*
         * REVISIT: actually, riscv can use tail call optimization
         * in some cases. I (yamamoto) don't know the exact conditions
         * though.
         */
        return false;
    }
    if (!strcmp(comp_ctx->target_arch, "mips")) {
        /*
         * cf.
         * https://github.com/bytecodealliance/wasm-micro-runtime/issues/2412
         */
        return false;
    }
    if (strstr(comp_ctx->target_arch, "thumb")) {
        /*
         * cf.
         * https://github.com/bytecodealliance/wasm-micro-runtime/issues/2412
         */
        return false;
    }
    /*
     * x86-64/i386: true
     *
     * others: assume true for now
     */
    return true;
}

unsigned int
aot_estimate_stack_usage_for_function_call(const AOTCompContext *comp_ctx,
                                           const AOTFuncType *callee_func_type)
{
    /*
     * Estimate how much stack is necessary to make a function call.
     * This does not include the stack consumption of the callee function.
     *
     * For a precise estimation, this function would ideally need to be
     * target-specific.
     * However, this implementation aims to be target-independent,
     * allowing a small overestimation, which is probably ok for our
     * purposes (overflow detection and memory profiling).
     * On the other hand, an underestimation should be avoided as it
     * can cause more serious problems like silent data corruption.
     *
     * Assumptions:
     *
     * - the first result is returned via a register.
     *
     * - all parameters, including exec_env and pointers to non-first
     *   results, are passed via the stack.
     *   (this is a bit more pessimistic than many real calling conventions,
     *   where some of the parameters are passed via registers.)
     *
     * - an N-byte value needs N-byte alignment on the stack.
     *
     * - a value smaller than a pointer is extended.
     *   (eg. 4 byte values are extended to 8 bytes on x86-64.)
     */
    const unsigned int param_count = callee_func_type->param_count;
    const unsigned int result_count = callee_func_type->result_count;
    unsigned int size = 0;
    unsigned int i;
    unsigned int nb;

    if (!strcmp(comp_ctx->target_arch, "xtensa")) {
        /*
         * In the xtensa windowed ABI, outgoing arguments are already
         * included in the callee's stack frame size, which equals
         * the operand of the ENTRY instruction and what LLVM
         * MFI->getStackSize returns.
         */
        return 0;
    }

    /* exec_env */
    size = comp_ctx->pointer_size;

    /* parameters */
    for (i = 0; i < param_count; i++) {
        nb = wasm_value_type_cell_num(callee_func_type->types[i]) * 4;
        if (nb < comp_ctx->pointer_size) {
            nb = comp_ctx->pointer_size;
        }
        size = align_uint(size, nb) + nb;
    }

    /* pointers to results */
    nb = comp_ctx->pointer_size;
    for (i = 1; i < result_count; i++) {
        size = align_uint(size, nb) + nb;
    }

    /* return address */
    nb = comp_ctx->pointer_size;
    size = align_uint(size, nb) + nb;

    /*
     * some extra for possible arch-dependent things like
     * 16-byte alignment for x86_64.
     */
    size += 16;
    return size;
}

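/*
 * For example, with the assumptions above on a 64-bit target
 * (pointer_size == 8), a callee of type (i32, i64) -> i64 is estimated as:
 *   8 (exec_env) + 8 (i32, widened to pointer size) + 8 (i64)
 *   + 8 (return address) + 16 (arch-dependent slack) = 48 bytes.
 * Only the second and later results need result pointers, so a single
 * result adds nothing here.
 */
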
static uint32
get_inst_extra_offset(AOTCompContext *comp_ctx)
{
    const AOTCompData *comp_data = comp_ctx->comp_data;
    uint32 table_count = comp_data->import_table_count + comp_data->table_count;
    uint64 offset = get_tbl_inst_offset(comp_ctx, NULL, table_count);
    uint32 offset_32 = (uint32)offset;
    bh_assert(offset <= UINT32_MAX);
    offset_32 = align_uint(offset_32, 8);
    return offset_32;
}

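/*
 * i.e. the per-instance "extra" data is located right after the table
 * instances (get_tbl_inst_offset with index == table_count), rounded up
 * to 8-byte alignment; it is used below to reach
 * AOTModuleInstanceExtra::stack_sizes in indirect mode.
 */
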
/*
 * A "precheck" function performs a few things before calling wrapped_func:
 *
 * - update native_stack_top_min if necessary
 * - check for native stack overflow (and trap if it would occur)
 */
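/*
 * The generated wrapper consists of the following basic blocks:
 *
 *   begin -> check_top_block -> [update_top_block ->]
 *       stack_bound_check_block -> call_wrapped_func
 *
 * update_top_block is only emitted when enable_stack_estimation is set,
 * and the bound comparison is only emitted when enable_stack_bound_check
 * is set; otherwise the corresponding block simply branches through to
 * the next one.
 */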
static LLVMValueRef
aot_add_precheck_function(AOTCompContext *comp_ctx, LLVMModuleRef module,
                          uint32 func_index, uint32 orig_param_count,
                          LLVMTypeRef func_type, LLVMValueRef wrapped_func)
{
    LLVMValueRef precheck_func;
    LLVMBasicBlockRef begin = NULL;
    LLVMBasicBlockRef check_top_block = NULL;
    LLVMBasicBlockRef update_top_block = NULL;
    LLVMBasicBlockRef stack_bound_check_block = NULL;
    LLVMBasicBlockRef call_wrapped_func_block = NULL;
    LLVMValueRef *params = NULL;

    precheck_func =
        aot_add_llvm_func1(comp_ctx, module, func_index, orig_param_count,
                           func_type, AOT_FUNC_PREFIX);
    if (!precheck_func) {
        goto fail;
    }
    begin = LLVMAppendBasicBlockInContext(comp_ctx->context, precheck_func,
                                          "begin");
    check_top_block = LLVMAppendBasicBlockInContext(
        comp_ctx->context, precheck_func, "check_top_block");
    if (comp_ctx->enable_stack_estimation) {
        update_top_block = LLVMAppendBasicBlockInContext(
            comp_ctx->context, precheck_func, "update_top_block");
        if (!update_top_block) {
            goto fail;
        }
    }
    stack_bound_check_block = LLVMAppendBasicBlockInContext(
        comp_ctx->context, precheck_func, "stack_bound_check_block");
    call_wrapped_func_block = LLVMAppendBasicBlockInContext(
        comp_ctx->context, precheck_func, "call_wrapped_func");
    if (!begin || !check_top_block || !stack_bound_check_block
        || !call_wrapped_func_block) {
        goto fail;
    }
    LLVMBuilderRef b = comp_ctx->builder;
    LLVMPositionBuilderAtEnd(b, begin);

    /* create a temporary minimum func_ctx */
    AOTFuncContext tmp;
    AOTFuncContext *func_ctx = &tmp;
    memset(func_ctx, 0, sizeof(*func_ctx));
    func_ctx->func = precheck_func;
    func_ctx->module = module;
    func_ctx->aot_func = comp_ctx->comp_data->funcs[func_index];
#if WASM_ENABLE_DEBUG_AOT != 0
    func_ctx->debug_func = NULL;
#endif
    if (!create_basic_func_context(comp_ctx, func_ctx))
        goto fail;
    if (comp_ctx->enable_stack_bound_check
        && !create_native_stack_bound(comp_ctx, func_ctx))
        goto fail;
    if (comp_ctx->enable_stack_estimation
        && !create_native_stack_top_min(comp_ctx, func_ctx)) {
        goto fail;
    }

    uint32 param_count = LLVMCountParams(precheck_func);
    uint32 sz = param_count * (uint32)sizeof(LLVMValueRef);
    params = wasm_runtime_malloc(sz);
    if (params == NULL) {
        goto fail;
    }
    LLVMGetParams(precheck_func, params);

    const bool is_64bit = comp_ctx->pointer_size == sizeof(uint64);
    LLVMTypeRef uintptr_type;
    if (is_64bit)
        uintptr_type = I64_TYPE;
    else
        uintptr_type = I32_TYPE;

    /*
     * load the stack pointer
     */
    LLVMValueRef sp_ptr = LLVMBuildAlloca(b, I32_TYPE, "sp_ptr");
    if (!sp_ptr) {
        goto fail;
    }
    LLVMValueRef sp = LLVMBuildPtrToInt(b, sp_ptr, uintptr_type, "sp");
    if (!sp) {
        goto fail;
    }

    /*
     * load the value for this wrapped function from the stack_sizes array
     */
    LLVMValueRef stack_sizes;
    if (comp_ctx->is_indirect_mode) {
        uint32 offset_u32;
        LLVMValueRef offset;
        LLVMValueRef stack_sizes_p;

        offset_u32 = get_inst_extra_offset(comp_ctx);
        offset_u32 += offsetof(AOTModuleInstanceExtra, stack_sizes);
        offset = I32_CONST(offset_u32);
        if (!offset) {
            goto fail;
        }
        stack_sizes_p =
            LLVMBuildInBoundsGEP2(b, INT8_TYPE, func_ctx->aot_inst, &offset, 1,
                                  "aot_inst_stack_sizes_p");
        if (!stack_sizes_p) {
            goto fail;
        }
        stack_sizes =
            LLVMBuildLoad2(b, INT32_PTR_TYPE, stack_sizes_p, "stack_sizes");
        if (!stack_sizes) {
            goto fail;
        }
    }
    else {
        stack_sizes = comp_ctx->stack_sizes;
    }
    LLVMValueRef func_index_const = I32_CONST(func_index);
    LLVMValueRef sizes =
        LLVMBuildBitCast(b, stack_sizes, INT32_PTR_TYPE, "sizes");
    if (!sizes) {
        goto fail;
    }
    LLVMValueRef sizep = LLVMBuildInBoundsGEP2(b, I32_TYPE, sizes,
                                               &func_index_const, 1, "sizep");
    if (!sizep) {
        goto fail;
    }
    LLVMValueRef size32 = LLVMBuildLoad2(b, I32_TYPE, sizep, "size32");
    if (!size32) {
        goto fail;
    }
    LLVMValueRef size;
    if (is_64bit) {
        size = LLVMBuildZExt(b, size32, uintptr_type, "size");
        if (!size) {
            goto fail;
        }
    }
    else {
        size = size32;
    }

    /*
     * calculate the new sp
     */
    LLVMValueRef underflow =
        LLVMBuildICmp(b, LLVMIntULT, sp, size, "underflow");
    if (!underflow) {
        goto fail;
    }
    LLVMValueRef new_sp = LLVMBuildSub(b, sp, size, "new_sp");
    if (!new_sp) {
        goto fail;
    }
    if (!LLVMBuildBr(b, check_top_block)) {
        goto fail;
    }

    LLVMPositionBuilderAtEnd(b, check_top_block);
    if (comp_ctx->enable_stack_estimation) {
        /*
         * load native_stack_top_min from the exec_env
         */
        LLVMValueRef top_min =
            LLVMBuildLoad2(b, OPQ_PTR_TYPE, func_ctx->native_stack_top_min_addr,
                           "native_stack_top_min");
        if (!top_min) {
            goto fail;
        }
        LLVMValueRef top_min_int = LLVMBuildPtrToInt(
            b, top_min, uintptr_type, "native_stack_top_min_int");
        if (!top_min_int) {
            goto fail;
        }
        bh_assert(update_top_block);

        /*
         * update native_stack_top_min if
         * new_sp = sp - size < native_stack_top_min
         *
         * Note: unless the stack has already overflowed in this exec_env,
         * native_stack_bound <= native_stack_top_min
         */
        LLVMValueRef cmp_top =
            LLVMBuildICmp(b, LLVMIntULT, new_sp, top_min_int, "cmp_top");
        if (!cmp_top) {
            goto fail;
        }
        cmp_top = LLVMBuildOr(b, underflow, cmp_top, "cmp_top2");
        if (!cmp_top) {
            goto fail;
        }
        if (!LLVMBuildCondBr(b, cmp_top, update_top_block,
                             call_wrapped_func_block)) {
            aot_set_last_error("llvm build cond br failed.");
            goto fail;
        }

        /*
         * update native_stack_top_min
         */
        LLVMPositionBuilderAtEnd(b, update_top_block);
        LLVMValueRef new_sp_ptr =
            LLVMBuildIntToPtr(b, new_sp, INT8_PTR_TYPE, "new_sp_ptr");
        if (!new_sp_ptr) {
            goto fail;
        }
        if (!LLVMBuildStore(b, new_sp_ptr,
                            func_ctx->native_stack_top_min_addr)) {
            goto fail;
        }
        if (!LLVMBuildBr(b, stack_bound_check_block)) {
            goto fail;
        }
    }
    else {
        if (!LLVMBuildBr(b, stack_bound_check_block)) {
            goto fail;
        }
    }

    LLVMPositionBuilderAtEnd(b, stack_bound_check_block);
    if (comp_ctx->enable_stack_bound_check) {
        /*
         * trap if new_sp < native_stack_bound
         */
        LLVMValueRef bound_int = LLVMBuildPtrToInt(
            b, func_ctx->native_stack_bound, uintptr_type, "bound_base_int");
        if (!bound_int) {
            goto fail;
        }
        LLVMValueRef cmp =
            LLVMBuildICmp(b, LLVMIntULT, new_sp, bound_int, "cmp");
        if (!cmp) {
            goto fail;
        }
        cmp = LLVMBuildOr(b, underflow, cmp, "cmp2");
        if (!cmp) {
            goto fail;
        }
        /* todo: @llvm.expect.i1(i1 %cmp, i1 0) */
        if (!aot_emit_exception(comp_ctx, func_ctx, EXCE_NATIVE_STACK_OVERFLOW,
                                true, cmp, call_wrapped_func_block))
            goto fail;
    }
    else {
        if (!LLVMBuildBr(b, call_wrapped_func_block)) {
            goto fail;
        }
    }

    /*
     * call the wrapped function
     * use a tail-call if possible
     */
    LLVMPositionBuilderAtEnd(b, call_wrapped_func_block);
    const char *name = "tail_call";
    LLVMTypeRef ret_type = LLVMGetReturnType(func_type);
    if (ret_type == VOID_TYPE) {
        name = "";
    }
    LLVMValueRef retval =
        LLVMBuildCall2(b, func_type, wrapped_func, params, param_count, name);
    if (!retval) {
        goto fail;
    }
    wasm_runtime_free(params);
    params = NULL;
#if LLVM_VERSION_MAJOR >= 17
    if (aot_target_precheck_can_use_musttail(comp_ctx)) {
        LLVMSetTailCallKind(retval, LLVMTailCallKindMustTail);
    }
    else {
        LLVMSetTailCallKind(retval, LLVMTailCallKindTail);
    }
#else
    LLVMSetTailCall(retval, true);
#endif
    if (ret_type == VOID_TYPE) {
        if (!LLVMBuildRetVoid(b)) {
            goto fail;
        }
    }
    else {
        if (!LLVMBuildRet(b, retval)) {
            goto fail;
        }
    }
    return precheck_func;
fail:
    if (params != NULL) {
        wasm_runtime_free(params);
    }
    aot_set_last_error("failed to build precheck wrapper function.");
    return NULL;
}

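/*
 * Roughly, the wrapper built above behaves like the following C sketch
 * (illustration only; the real code is emitted as LLVM IR, and the
 * enable_* conditions are compile-time options of comp_ctx):
 *
 *   ret_type precheck_N(exec_env, ...)      // named AOT_FUNC_PREFIX + N
 *   {
 *       uintptr_t sp = (uintptr_t)&local;   // alloca used as an sp probe
 *       uintptr_t size = stack_sizes[N];
 *       bool underflow = sp < size;
 *       uintptr_t new_sp = sp - size;
 *       if (stack estimation enabled
 *           && (underflow || new_sp < (uintptr_t)native_stack_top_min))
 *           native_stack_top_min = (uint8 *)new_sp;
 *       if (stack bound check enabled
 *           && (underflow || new_sp < (uintptr_t)native_stack_bound))
 *           return trap(EXCE_NATIVE_STACK_OVERFLOW);
 *       return wrapped_func(exec_env, ...); // tail call when possible
 *   }
 */
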
/**
 * Add LLVM function
 */
static LLVMValueRef
aot_add_llvm_func(AOTCompContext *comp_ctx, LLVMModuleRef module,
                  const AOTFuncType *aot_func_type, uint32 func_index,
                  LLVMTypeRef *p_func_type, LLVMValueRef *p_precheck_func)
{
    LLVMValueRef func = NULL;
    LLVMTypeRef *param_types, ret_type, func_type;
    LLVMTypeRef func_type_wrapper;
    LLVMValueRef func_wrapper;
    LLVMBasicBlockRef func_begin;
    char func_name[48];
    uint64 size;
    uint32 i, j = 0, param_count = (uint64)aot_func_type->param_count;
    uint32 backend_thread_num, compile_thread_num;

    /* exec env as first parameter */
    param_count++;

    /* The addresses of the extra wasm function results (all except the
       first one) are appended to the aot function parameters. */
    if (aot_func_type->result_count > 1)
        param_count += aot_func_type->result_count - 1;

    /* Initialize parameter types of the LLVM function */
    size = sizeof(LLVMTypeRef) * ((uint64)param_count);
    if (size >= UINT32_MAX
        || !(param_types = wasm_runtime_malloc((uint32)size))) {
        aot_set_last_error("allocate memory failed.");
        return NULL;
    }

    /* exec env as first parameter */
    param_types[j++] = comp_ctx->exec_env_type;
    for (i = 0; i < aot_func_type->param_count; i++)
        param_types[j++] = TO_LLVM_TYPE(aot_func_type->types[i]);
    /* Extra results' addresses */
    for (i = 1; i < aot_func_type->result_count; i++, j++) {
        param_types[j] =
            TO_LLVM_TYPE(aot_func_type->types[aot_func_type->param_count + i]);
        if (!(param_types[j] = LLVMPointerType(param_types[j], 0))) {
            aot_set_last_error("llvm get pointer type failed.");
            goto fail;
        }
    }

    /* Resolve return type of the LLVM function */
    if (aot_func_type->result_count)
        ret_type =
            TO_LLVM_TYPE(aot_func_type->types[aot_func_type->param_count]);
    else
        ret_type = VOID_TYPE;

    /* Resolve function prototype */
    if (!(func_type =
              LLVMFunctionType(ret_type, param_types, param_count, false))) {
        aot_set_last_error("create LLVM function type failed.");
        goto fail;
    }

    bh_assert(func_index < comp_ctx->func_ctx_count);
    bh_assert(LLVMGetReturnType(func_type) == ret_type);
    const char *prefix = AOT_FUNC_PREFIX;
    const bool need_precheck =
        comp_ctx->enable_stack_bound_check || comp_ctx->enable_stack_estimation;
    if (need_precheck) {
        /*
         * REVISIT: probably this breaks the windows hw bound check
         * (the RtlAddFunctionTable stuff)
         */
        prefix = AOT_FUNC_INTERNAL_PREFIX;
    }
    if (!(func = aot_add_llvm_func1(comp_ctx, module, func_index,
                                    aot_func_type->param_count, func_type,
                                    prefix)))
        goto fail;

    if (comp_ctx->is_indirect_mode) {
        /* avoid LUT relocations ("switch-table") */
        LLVMAttributeRef attr_no_jump_tables = LLVMCreateStringAttribute(
            comp_ctx->context, "no-jump-tables",
            (uint32)strlen("no-jump-tables"), "true", (uint32)strlen("true"));
        LLVMAddAttributeAtIndex(func, LLVMAttributeFunctionIndex,
                                attr_no_jump_tables);
    }

    /* spread "frame-pointer"="all" to every function */
    if (comp_ctx->emit_frame_pointer) {
        const char *key = "frame-pointer";
        const char *val = "all";
        LLVMAttributeRef no_omit_fp = LLVMCreateStringAttribute(
            comp_ctx->context, key, (unsigned)strlen(key), val,
            (unsigned)strlen(val));
        if (!no_omit_fp) {
            aot_set_last_error("create LLVM attribute (frame-pointer) failed.");
            goto fail;
        }
        LLVMAddAttributeAtIndex(func, LLVMAttributeFunctionIndex, no_omit_fp);
    }

    if (need_precheck) {
        if (!comp_ctx->is_jit_mode)
            LLVMSetLinkage(func, LLVMInternalLinkage);
        unsigned int kind =
            LLVMGetEnumAttributeKindForName("noinline", strlen("noinline"));
        LLVMAttributeRef attr_noinline =
            LLVMCreateEnumAttribute(comp_ctx->context, kind, 0);
        LLVMAddAttributeAtIndex(func, LLVMAttributeFunctionIndex,
                                attr_noinline);

        LLVMValueRef precheck_func = aot_add_precheck_function(
            comp_ctx, module, func_index, aot_func_type->param_count, func_type,
            func);
        if (!precheck_func)
            goto fail;
        LLVMAddAttributeAtIndex(precheck_func, LLVMAttributeFunctionIndex,
                                attr_noinline);
        *p_precheck_func = precheck_func;
    }
    else {
        *p_precheck_func = func;
    }

    if (p_func_type)
        *p_func_type = func_type;

    backend_thread_num = WASM_ORC_JIT_BACKEND_THREAD_NUM;
    compile_thread_num = WASM_ORC_JIT_COMPILE_THREAD_NUM;

    /* Add the jit wrapper function with a simple prototype, so that we
       can easily call it to trigger its compilation and let LLVM JIT
       compile the actual jit functions by adding them into the function
       list in the PartitionFunction callback */
    if (comp_ctx->is_jit_mode
        && (func_index % (backend_thread_num * compile_thread_num)
            < backend_thread_num)) {
        func_type_wrapper = LLVMFunctionType(VOID_TYPE, NULL, 0, false);
        if (!func_type_wrapper) {
            aot_set_last_error("create LLVM function type failed.");
            goto fail;
        }

        snprintf(func_name, sizeof(func_name), "%s%d%s", AOT_FUNC_PREFIX,
                 func_index, "_wrapper");
        if (!(func_wrapper =
                  LLVMAddFunction(module, func_name, func_type_wrapper))) {
            aot_set_last_error("add LLVM function failed.");
            goto fail;
        }

        if (!(func_begin = LLVMAppendBasicBlockInContext(
                  comp_ctx->context, func_wrapper, "func_begin"))) {
            aot_set_last_error("add LLVM basic block failed.");
            goto fail;
        }

        LLVMPositionBuilderAtEnd(comp_ctx->builder, func_begin);
        if (!LLVMBuildRetVoid(comp_ctx->builder)) {
            aot_set_last_error("llvm build ret failed.");
            goto fail;
        }
    }

fail:
    wasm_runtime_free(param_types);
    return func;
}

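/*
 * Naming summary: the externally visible function is always named
 * AOT_FUNC_PREFIX + func_index. When a precheck wrapper is needed (stack
 * bound check or stack estimation enabled), that name goes to the wrapper
 * built by aot_add_precheck_function, and the real body gets
 * AOT_FUNC_INTERNAL_PREFIX + func_index with internal linkage in AOT mode.
 * In JIT mode, an additional empty "<AOT_FUNC_PREFIX><N>_wrapper" function
 * is added for a subset of functions to drive compilation partitioning.
 */
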
static void
free_block_memory(AOTBlock *block)
{
    if (block->param_types)
        wasm_runtime_free(block->param_types);
    if (block->result_types)
        wasm_runtime_free(block->result_types);
    wasm_runtime_free(block);
}

/**
 * Create the first AOTBlock, i.e. the function block, for the function
 */
static AOTBlock *
aot_create_func_block(const AOTCompContext *comp_ctx,
                      const AOTFuncContext *func_ctx, const AOTFunc *func,
                      const AOTFuncType *aot_func_type)
{
    AOTBlock *aot_block;
    uint32 param_count = aot_func_type->param_count,
           result_count = aot_func_type->result_count;

    /* Allocate memory */
    if (!(aot_block = wasm_runtime_malloc(sizeof(AOTBlock)))) {
        aot_set_last_error("allocate memory failed.");
        return NULL;
    }
    memset(aot_block, 0, sizeof(AOTBlock));
    if (param_count
        && !(aot_block->param_types = wasm_runtime_malloc(param_count))) {
        aot_set_last_error("allocate memory failed.");
        goto fail;
    }
    if (result_count) {
        if (!(aot_block->result_types = wasm_runtime_malloc(result_count))) {
            aot_set_last_error("allocate memory failed.");
            goto fail;
        }
    }

    /* Set block data */
    aot_block->label_type = LABEL_TYPE_FUNCTION;
    aot_block->param_count = param_count;
    if (param_count) {
        bh_memcpy_s(aot_block->param_types, param_count, aot_func_type->types,
                    param_count);
    }
    aot_block->result_count = result_count;
    if (result_count) {
        bh_memcpy_s(aot_block->result_types, result_count,
                    aot_func_type->types + param_count, result_count);
    }
    aot_block->wasm_code_end = func->code + func->code_size;

    /* Add function entry block */
    if (!(aot_block->llvm_entry_block = LLVMAppendBasicBlockInContext(
              comp_ctx->context, func_ctx->func, "func_begin"))) {
        aot_set_last_error("add LLVM basic block failed.");
        goto fail;
    }

    return aot_block;

fail:
    free_block_memory(aot_block);
    return NULL;
}

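/*
 * Note: param_types/result_types hold one byte per value type, so the
 * wasm_runtime_malloc(param_count) / wasm_runtime_malloc(result_count)
 * calls above allocate param_count and result_count bytes respectively
 * (value types are uint8, as in aot_func_type->types).
 */
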
static bool
create_argv_buf(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
{
    LLVMValueRef argv_buf_offset = I32_THREE, argv_buf_addr;
    LLVMTypeRef int32_ptr_type;

    /* Get argv buffer address */
    if (!(argv_buf_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &argv_buf_offset, 1, "argv_buf_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(int32_ptr_type = LLVMPointerType(INT32_PTR_TYPE, 0))) {
        aot_set_last_error("llvm add pointer type failed");
        return false;
    }

    /* Convert to int32 pointer type */
    if (!(argv_buf_addr = LLVMBuildBitCast(comp_ctx->builder, argv_buf_addr,
                                           int32_ptr_type, "argv_buf_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (!(func_ctx->argv_buf = LLVMBuildLoad2(comp_ctx->builder, INT32_PTR_TYPE,
                                              argv_buf_addr, "argv_buf"))) {
        aot_set_last_error("llvm build load failed");
        return false;
    }
    return true;
}

static bool
create_native_stack_bound(const AOTCompContext *comp_ctx,
                          AOTFuncContext *func_ctx)
{
    LLVMValueRef stack_bound_offset = I32_FOUR, stack_bound_addr;

    if (!(stack_bound_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &stack_bound_offset, 1, "stack_bound_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->native_stack_bound =
              LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE, stack_bound_addr,
                             "native_stack_bound"))) {
        aot_set_last_error("llvm build load failed");
        return false;
    }
    return true;
}

static bool
create_native_stack_top_min(const AOTCompContext *comp_ctx,
                            AOTFuncContext *func_ctx)
{
    LLVMValueRef offset = I32_NINE;

    if (!(func_ctx->native_stack_top_min_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env, &offset, 1,
              "native_stack_top_min_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    return true;
}

static bool
create_aux_stack_info(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
{
    LLVMValueRef aux_stack_bound_offset = I32_SIX, aux_stack_bound_addr;
    LLVMValueRef aux_stack_bottom_offset = I32_SEVEN, aux_stack_bottom_addr;

    /* Get aux stack boundary address */
    if (!(aux_stack_bound_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &aux_stack_bound_offset, 1, "aux_stack_bound_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(aux_stack_bound_addr =
              LLVMBuildBitCast(comp_ctx->builder, aux_stack_bound_addr,
                               INT32_PTR_TYPE, "aux_stack_bound_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (!(func_ctx->aux_stack_bound =
              LLVMBuildLoad2(comp_ctx->builder, I32_TYPE, aux_stack_bound_addr,
                             "aux_stack_bound"))) {
        aot_set_last_error("llvm build load failed");
        return false;
    }

    /* Get aux stack bottom address */
    if (!(aux_stack_bottom_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &aux_stack_bottom_offset, 1, "aux_stack_bottom_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(aux_stack_bottom_addr =
              LLVMBuildBitCast(comp_ctx->builder, aux_stack_bottom_addr,
                               INT32_PTR_TYPE, "aux_stack_bottom_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (!(func_ctx->aux_stack_bottom =
              LLVMBuildLoad2(comp_ctx->builder, I32_TYPE, aux_stack_bottom_addr,
                             "aux_stack_bottom"))) {
        aot_set_last_error("llvm build load failed");
        return false;
    }
    return true;
}

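/*
 * The "aux stack" here is the auxiliary wasm stack (the C shadow stack in
 * linear memory maintained by typical wasm toolchains), not the native
 * stack; its bound and bottom are loaded as i32 values because they are
 * offsets within linear memory.
 */
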
static bool
create_native_symbol(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
{
    LLVMValueRef native_symbol_offset = I32_EIGHT, native_symbol_addr;

    if (!(native_symbol_addr = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, OPQ_PTR_TYPE, func_ctx->exec_env,
              &native_symbol_offset, 1, "native_symbol_addr"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->native_symbol =
              LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE,
                             native_symbol_addr, "native_symbol_tmp"))) {
        aot_set_last_error("llvm build load failed");
        return false;
    }
    if (!(func_ctx->native_symbol =
              LLVMBuildBitCast(comp_ctx->builder, func_ctx->native_symbol,
                               comp_ctx->exec_env_type, "native_symbol"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    return true;
}

static bool
create_local_variables(const AOTCompData *comp_data,
                       const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                       const AOTFunc *func)
{
    AOTFuncType *aot_func_type = comp_data->func_types[func->func_type_index];
    char local_name[32];
    uint32 i, j = 1;

    for (i = 0; i < aot_func_type->param_count; i++, j++) {
        snprintf(local_name, sizeof(local_name), "l%d", i);
        func_ctx->locals[i] =
            LLVMBuildAlloca(comp_ctx->builder,
                            TO_LLVM_TYPE(aot_func_type->types[i]), local_name);
        if (!func_ctx->locals[i]) {
            aot_set_last_error("llvm build alloca failed.");
            return false;
        }
        if (!LLVMBuildStore(comp_ctx->builder, LLVMGetParam(func_ctx->func, j),
                            func_ctx->locals[i])) {
            aot_set_last_error("llvm build store failed.");
            return false;
        }
    }

    for (i = 0; i < func->local_count; i++) {
        LLVMTypeRef local_type;
        LLVMValueRef local_value = NULL;
        snprintf(local_name, sizeof(local_name), "l%d",
                 aot_func_type->param_count + i);
        local_type = TO_LLVM_TYPE(func->local_types[i]);
        func_ctx->locals[aot_func_type->param_count + i] =
            LLVMBuildAlloca(comp_ctx->builder, local_type, local_name);
        if (!func_ctx->locals[aot_func_type->param_count + i]) {
            aot_set_last_error("llvm build alloca failed.");
            return false;
        }
        switch (func->local_types[i]) {
            case VALUE_TYPE_I32:
                local_value = I32_ZERO;
                break;
            case VALUE_TYPE_I64:
                local_value = I64_ZERO;
                break;
            case VALUE_TYPE_F32:
                local_value = F32_ZERO;
                break;
            case VALUE_TYPE_F64:
                local_value = F64_ZERO;
                break;
            case VALUE_TYPE_V128:
                local_value = V128_i64x2_ZERO;
                break;
            case VALUE_TYPE_FUNCREF:
            case VALUE_TYPE_EXTERNREF:
                local_value = REF_NULL;
                break;
            default:
                bh_assert(0);
                break;
        }
        if (!LLVMBuildStore(comp_ctx->builder, local_value,
                            func_ctx->locals[aot_func_type->param_count + i])) {
            aot_set_last_error("llvm build store failed.");
            return false;
        }
    }
    return true;
}

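/*
 * Every wasm local (parameters first, then declared locals) gets its own
 * alloca named "l<index>": parameters are initialized from the function
 * arguments (starting at LLVM parameter 1, after exec_env), and declared
 * locals are zero/null-initialized as the wasm spec requires.
 */
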
static bool
create_memory_info(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                   LLVMTypeRef int8_ptr_type, uint32 func_index)
{
    LLVMValueRef offset, mem_info_base;
    uint32 memory_count;
    WASMModule *module = comp_ctx->comp_data->wasm_module;
    WASMFunction *func = module->functions[func_index];
    LLVMTypeRef bound_check_type;
    bool mem_space_unchanged =
        (!func->has_op_memory_grow && !func->has_op_func_call)
        || (!module->possible_memory_grow);
#if WASM_ENABLE_SHARED_MEMORY != 0
    bool is_shared_memory;
#endif

    func_ctx->mem_space_unchanged = mem_space_unchanged;

    memory_count = module->memory_count + module->import_memory_count;
    /* If the module doesn't have memory, reserve
       one mem_info space with empty content */
    if (memory_count == 0)
        memory_count = 1;

    if (!(func_ctx->mem_info =
              wasm_runtime_malloc(sizeof(AOTMemInfo) * memory_count))) {
        return false;
    }
    memset(func_ctx->mem_info, 0, sizeof(AOTMemInfo));

    /* Currently we only create memory info for memory 0 */
    /* Load memory base address */
#if WASM_ENABLE_SHARED_MEMORY != 0
    is_shared_memory =
        comp_ctx->comp_data->memories[0].memory_flags & 0x02 ? true : false;
    if (is_shared_memory) {
        LLVMValueRef shared_mem_addr;
        offset = I32_CONST(offsetof(AOTModuleInstance, memories));
        if (!offset) {
            aot_set_last_error("create llvm const failed.");
            return false;
        }

        /* aot_inst->memories */
        if (!(shared_mem_addr = LLVMBuildInBoundsGEP2(
                  comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst, &offset, 1,
                  "shared_mem_addr_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
        if (!(shared_mem_addr =
                  LLVMBuildBitCast(comp_ctx->builder, shared_mem_addr,
                                   int8_ptr_type, "shared_mem_addr_ptr"))) {
            aot_set_last_error("llvm build bit cast failed");
            return false;
        }
        /* aot_inst->memories[0] */
        if (!(shared_mem_addr =
                  LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE,
                                 shared_mem_addr, "shared_mem_addr"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
        if (!(shared_mem_addr =
                  LLVMBuildBitCast(comp_ctx->builder, shared_mem_addr,
                                   int8_ptr_type, "shared_mem_addr_ptr"))) {
            aot_set_last_error("llvm build bit cast failed");
            return false;
        }
        if (!(shared_mem_addr =
                  LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE,
                                 shared_mem_addr, "shared_mem_addr"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
        /* memories[0]->memory_data */
        offset = I32_CONST(offsetof(AOTMemoryInstance, memory_data));
        if (!(func_ctx->mem_info[0].mem_base_addr = LLVMBuildInBoundsGEP2(
                  comp_ctx->builder, INT8_TYPE, shared_mem_addr, &offset, 1,
                  "mem_base_addr_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
        /* memories[0]->cur_page_count */
        offset = I32_CONST(offsetof(AOTMemoryInstance, cur_page_count));
        if (!(func_ctx->mem_info[0].mem_cur_page_count_addr =
                  LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
                                        shared_mem_addr, &offset, 1,
                                        "mem_cur_page_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
        /* memories[0]->memory_data_size */
        offset = I32_CONST(offsetof(AOTMemoryInstance, memory_data_size));
        if (!(func_ctx->mem_info[0].mem_data_size_addr = LLVMBuildInBoundsGEP2(
                  comp_ctx->builder, INT8_TYPE, shared_mem_addr, &offset, 1,
                  "mem_data_size_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
    }
    else
#endif
    {
        uint32 offset_of_global_table_data;

        if (comp_ctx->is_jit_mode)
            offset_of_global_table_data =
                offsetof(WASMModuleInstance, global_table_data);
        else
            offset_of_global_table_data =
                offsetof(AOTModuleInstance, global_table_data);

        offset = I32_CONST(offset_of_global_table_data
                           + offsetof(AOTMemoryInstance, memory_data));
        if (!(func_ctx->mem_info[0].mem_base_addr = LLVMBuildInBoundsGEP2(
                  comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst, &offset, 1,
                  "mem_base_addr_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
        offset = I32_CONST(offset_of_global_table_data
                           + offsetof(AOTMemoryInstance, cur_page_count));
        if (!(func_ctx->mem_info[0].mem_cur_page_count_addr =
                  LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
                                        func_ctx->aot_inst, &offset, 1,
                                        "mem_cur_page_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
        offset = I32_CONST(offset_of_global_table_data
                           + offsetof(AOTMemoryInstance, memory_data_size));
        if (!(func_ctx->mem_info[0].mem_data_size_addr = LLVMBuildInBoundsGEP2(
                  comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst, &offset, 1,
                  "mem_data_size_offset"))) {
            aot_set_last_error("llvm build in bounds gep failed");
            return false;
        }
    }

    /* Store mem info base address before cast */
    mem_info_base = func_ctx->mem_info[0].mem_base_addr;

    if (!(func_ctx->mem_info[0].mem_base_addr = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_base_addr,
              int8_ptr_type, "mem_base_addr_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_cur_page_count_addr = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_cur_page_count_addr,
              INT32_PTR_TYPE, "mem_cur_page_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_data_size_addr = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_data_size_addr,
              INT32_PTR_TYPE, "mem_data_size_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_base_addr = LLVMBuildLoad2(
                  comp_ctx->builder, OPQ_PTR_TYPE,
                  func_ctx->mem_info[0].mem_base_addr, "mem_base_addr"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
        if (!(func_ctx->mem_info[0].mem_cur_page_count_addr =
                  LLVMBuildLoad2(comp_ctx->builder, I32_TYPE,
                                 func_ctx->mem_info[0].mem_cur_page_count_addr,
                                 "mem_cur_page_count"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
        if (!(func_ctx->mem_info[0].mem_data_size_addr = LLVMBuildLoad2(
                  comp_ctx->builder, I32_TYPE,
                  func_ctx->mem_info[0].mem_data_size_addr, "mem_data_size"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
#if WASM_ENABLE_SHARED_MEMORY != 0
    else if (is_shared_memory) {
        /* The base address of shared memory will never change,
           so we can load the value here */
        if (!(func_ctx->mem_info[0].mem_base_addr = LLVMBuildLoad2(
                  comp_ctx->builder, OPQ_PTR_TYPE,
                  func_ctx->mem_info[0].mem_base_addr, "mem_base_addr"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
#endif

    bound_check_type = (comp_ctx->pointer_size == sizeof(uint64))
                           ? INT64_PTR_TYPE
                           : INT32_PTR_TYPE;

    /* Load memory bound check constants */
    offset = I32_CONST(offsetof(AOTMemoryInstance, mem_bound_check_1byte)
                       - offsetof(AOTMemoryInstance, memory_data));
    if (!(func_ctx->mem_info[0].mem_bound_check_1byte =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, mem_info_base,
                                    &offset, 1, "bound_check_1byte_offset"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_bound_check_1byte = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_bound_check_1byte,
              bound_check_type, "bound_check_1byte_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_bound_check_1byte = LLVMBuildLoad2(
                  comp_ctx->builder,
                  (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE
                                                             : I32_TYPE,
                  func_ctx->mem_info[0].mem_bound_check_1byte,
                  "bound_check_1byte"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
    offset = I32_CONST(offsetof(AOTMemoryInstance, mem_bound_check_2bytes)
                       - offsetof(AOTMemoryInstance, memory_data));
    if (!(func_ctx->mem_info[0].mem_bound_check_2bytes =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, mem_info_base,
                                    &offset, 1, "bound_check_2bytes_offset"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_bound_check_2bytes = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_bound_check_2bytes,
              bound_check_type, "bound_check_2bytes_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_bound_check_2bytes = LLVMBuildLoad2(
                  comp_ctx->builder,
                  (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE
                                                             : I32_TYPE,
                  func_ctx->mem_info[0].mem_bound_check_2bytes,
                  "bound_check_2bytes"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
    offset = I32_CONST(offsetof(AOTMemoryInstance, mem_bound_check_4bytes)
                       - offsetof(AOTMemoryInstance, memory_data));
    if (!(func_ctx->mem_info[0].mem_bound_check_4bytes =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, mem_info_base,
                                    &offset, 1, "bound_check_4bytes_offset"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_bound_check_4bytes = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_bound_check_4bytes,
              bound_check_type, "bound_check_4bytes_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_bound_check_4bytes = LLVMBuildLoad2(
                  comp_ctx->builder,
                  (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE
                                                             : I32_TYPE,
                  func_ctx->mem_info[0].mem_bound_check_4bytes,
                  "bound_check_4bytes"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
    offset = I32_CONST(offsetof(AOTMemoryInstance, mem_bound_check_8bytes)
                       - offsetof(AOTMemoryInstance, memory_data));
    if (!(func_ctx->mem_info[0].mem_bound_check_8bytes =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, mem_info_base,
                                    &offset, 1, "bound_check_8bytes_offset"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_bound_check_8bytes = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_bound_check_8bytes,
              bound_check_type, "bound_check_8bytes_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_bound_check_8bytes = LLVMBuildLoad2(
                  comp_ctx->builder,
                  (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE
                                                             : I32_TYPE,
                  func_ctx->mem_info[0].mem_bound_check_8bytes,
                  "bound_check_8bytes"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
    offset = I32_CONST(offsetof(AOTMemoryInstance, mem_bound_check_16bytes)
                       - offsetof(AOTMemoryInstance, memory_data));
    if (!(func_ctx->mem_info[0].mem_bound_check_16bytes = LLVMBuildInBoundsGEP2(
              comp_ctx->builder, INT8_TYPE, mem_info_base, &offset, 1,
              "bound_check_16bytes_offset"))) {
        aot_set_last_error("llvm build in bounds gep failed");
        return false;
    }
    if (!(func_ctx->mem_info[0].mem_bound_check_16bytes = LLVMBuildBitCast(
              comp_ctx->builder, func_ctx->mem_info[0].mem_bound_check_16bytes,
              bound_check_type, "bound_check_16bytes_ptr"))) {
        aot_set_last_error("llvm build bit cast failed");
        return false;
    }
    if (mem_space_unchanged) {
        if (!(func_ctx->mem_info[0].mem_bound_check_16bytes = LLVMBuildLoad2(
                  comp_ctx->builder,
                  (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE
                                                             : I32_TYPE,
                  func_ctx->mem_info[0].mem_bound_check_16bytes,
                  "bound_check_16bytes"))) {
            aot_set_last_error("llvm build load failed");
            return false;
        }
    }
    return true;
}

  1250. static bool
  1251. create_cur_exception(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
  1252. {
  1253. LLVMValueRef offset;
  1254. offset = I32_CONST(offsetof(AOTModuleInstance, cur_exception));
  1255. func_ctx->cur_exception =
  1256. LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst,
  1257. &offset, 1, "cur_exception");
  1258. if (!func_ctx->cur_exception) {
  1259. aot_set_last_error("llvm build in bounds gep failed.");
  1260. return false;
  1261. }
  1262. return true;
  1263. }
  1264. static bool
  1265. create_func_type_indexes(const AOTCompContext *comp_ctx,
  1266. AOTFuncContext *func_ctx)
  1267. {
  1268. LLVMValueRef offset, func_type_indexes_ptr;
  1269. LLVMTypeRef int32_ptr_type;
  1270. offset = I32_CONST(offsetof(AOTModuleInstance, func_type_indexes));
  1271. func_type_indexes_ptr =
  1272. LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst,
  1273. &offset, 1, "func_type_indexes_ptr");
  1274. if (!func_type_indexes_ptr) {
        aot_set_last_error("llvm build in bounds gep failed.");
  1276. return false;
  1277. }
  1278. if (!(int32_ptr_type = LLVMPointerType(INT32_PTR_TYPE, 0))) {
  1279. aot_set_last_error("llvm get pointer type failed.");
  1280. return false;
  1281. }
  1282. func_ctx->func_type_indexes =
  1283. LLVMBuildBitCast(comp_ctx->builder, func_type_indexes_ptr,
  1284. int32_ptr_type, "func_type_indexes_tmp");
  1285. if (!func_ctx->func_type_indexes) {
  1286. aot_set_last_error("llvm build bit cast failed.");
  1287. return false;
  1288. }
  1289. func_ctx->func_type_indexes =
  1290. LLVMBuildLoad2(comp_ctx->builder, INT32_PTR_TYPE,
  1291. func_ctx->func_type_indexes, "func_type_indexes");
  1292. if (!func_ctx->func_type_indexes) {
  1293. aot_set_last_error("llvm build load failed.");
  1294. return false;
  1295. }
  1296. return true;
  1297. }
  1298. static bool
  1299. create_func_ptrs(const AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
  1300. {
  1301. LLVMValueRef offset;
  1302. offset = I32_CONST(offsetof(AOTModuleInstance, func_ptrs));
  1303. func_ctx->func_ptrs =
  1304. LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE, func_ctx->aot_inst,
  1305. &offset, 1, "func_ptrs_offset");
  1306. if (!func_ctx->func_ptrs) {
  1307. aot_set_last_error("llvm build in bounds gep failed.");
  1308. return false;
  1309. }
  1310. func_ctx->func_ptrs =
  1311. LLVMBuildBitCast(comp_ctx->builder, func_ctx->func_ptrs,
  1312. comp_ctx->exec_env_type, "func_ptrs_tmp");
  1313. if (!func_ctx->func_ptrs) {
  1314. aot_set_last_error("llvm build bit cast failed.");
  1315. return false;
  1316. }
  1317. func_ctx->func_ptrs = LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE,
  1318. func_ctx->func_ptrs, "func_ptrs_ptr");
  1319. if (!func_ctx->func_ptrs) {
  1320. aot_set_last_error("llvm build load failed.");
  1321. return false;
  1322. }
  1323. func_ctx->func_ptrs =
  1324. LLVMBuildBitCast(comp_ctx->builder, func_ctx->func_ptrs,
  1325. comp_ctx->exec_env_type, "func_ptrs");
  1326. if (!func_ctx->func_ptrs) {
  1327. aot_set_last_error("llvm build bit cast failed.");
  1328. return false;
  1329. }
  1330. return true;
  1331. }
  1332. const char *aot_stack_sizes_name = AOT_STACK_SIZES_NAME;
  1333. const char *aot_stack_sizes_alias_name = AOT_STACK_SIZES_ALIAS_NAME;
  1334. const char *aot_stack_sizes_section_name = AOT_STACK_SIZES_SECTION_NAME;
  1335. static bool
  1336. aot_create_stack_sizes(const AOTCompData *comp_data, AOTCompContext *comp_ctx)
  1337. {
  1338. LLVMValueRef stack_sizes, *values, array, alias;
  1339. LLVMTypeRef stack_sizes_type;
  1340. #if LLVM_VERSION_MAJOR <= 13
  1341. LLVMTypeRef alias_type;
  1342. #endif
  1343. uint64 size;
  1344. uint32 i;
  1345. stack_sizes_type = LLVMArrayType(I32_TYPE, comp_data->func_count);
  1346. if (!stack_sizes_type) {
  1347. aot_set_last_error("failed to create stack_sizes type.");
  1348. return false;
  1349. }
  1350. stack_sizes =
  1351. LLVMAddGlobal(comp_ctx->module, stack_sizes_type, aot_stack_sizes_name);
  1352. if (!stack_sizes) {
  1353. aot_set_last_error("failed to create stack_sizes global.");
  1354. return false;
  1355. }
  1356. size = sizeof(LLVMValueRef) * comp_data->func_count;
  1357. if (size >= UINT32_MAX || !(values = wasm_runtime_malloc((uint32)size))) {
  1358. aot_set_last_error("allocate memory failed.");
  1359. return false;
  1360. }
  1361. for (i = 0; i < comp_data->func_count; i++) {
  1362. /*
  1363. * This value is a placeholder, which will be replaced
  1364. * after the corresponding functions are compiled.
  1365. *
         * Don't use zeros because LLVM can optimize them to
  1367. * zeroinitializer.
  1368. */
  1369. values[i] = I32_NEG_ONE;
  1370. }
  1371. array = LLVMConstArray(I32_TYPE, values, comp_data->func_count);
  1372. wasm_runtime_free(values);
  1373. if (!array) {
  1374. aot_set_last_error("failed to create stack_sizes initializer.");
  1375. return false;
  1376. }
  1377. LLVMSetInitializer(stack_sizes, array);
  1378. /*
  1379. * create an alias so that aot_resolve_stack_sizes can find it.
  1380. */
  1381. #if LLVM_VERSION_MAJOR > 13
  1382. alias = LLVMAddAlias2(comp_ctx->module, stack_sizes_type, 0, stack_sizes,
  1383. aot_stack_sizes_alias_name);
  1384. #else
  1385. alias_type = LLVMPointerType(stack_sizes_type, 0);
  1386. if (!alias_type) {
  1387. aot_set_last_error("failed to create alias type.");
  1388. return false;
  1389. }
  1390. alias = LLVMAddAlias(comp_ctx->module, alias_type, stack_sizes,
  1391. aot_stack_sizes_alias_name);
  1392. #endif
  1393. if (!alias) {
  1394. aot_set_last_error("failed to create stack_sizes alias.");
  1395. return false;
  1396. }
  1397. /*
  1398. * make the original symbol internal. we mainly use this version to
  1399. * avoid creating extra relocations in the precheck functions.
  1400. */
  1401. LLVMSetLinkage(stack_sizes, LLVMInternalLinkage);
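    /* The dedicated section set below is presumably what lets
       aot_resolve_stack_sizes locate the placeholder array in the emitted
       object and patch in the real values. */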
  1402. LLVMSetSection(stack_sizes, aot_stack_sizes_section_name);
  1403. comp_ctx->stack_sizes_type = stack_sizes_type;
  1404. comp_ctx->stack_sizes = stack_sizes;
  1405. return true;
  1406. }
  1407. /**
  1408. * Create function compiler context
  1409. */
  1410. static AOTFuncContext *
  1411. aot_create_func_context(const AOTCompData *comp_data, AOTCompContext *comp_ctx,
  1412. AOTFunc *func, uint32 func_index)
  1413. {
  1414. AOTFuncContext *func_ctx;
  1415. AOTFuncType *aot_func_type = comp_data->func_types[func->func_type_index];
  1416. WASMModule *module = comp_ctx->comp_data->wasm_module;
  1417. WASMFunction *wasm_func = module->functions[func_index];
  1418. AOTBlock *aot_block;
  1419. LLVMTypeRef int8_ptr_type;
  1420. uint64 size;
  1421. /* Allocate memory for the function context */
  1422. size = offsetof(AOTFuncContext, locals)
  1423. + sizeof(LLVMValueRef)
  1424. * ((uint64)aot_func_type->param_count + func->local_count);
  1425. if (size >= UINT32_MAX || !(func_ctx = wasm_runtime_malloc((uint32)size))) {
  1426. aot_set_last_error("allocate memory failed.");
  1427. return NULL;
  1428. }
  1429. memset(func_ctx, 0, (uint32)size);
  1430. func_ctx->aot_func = func;
  1431. func_ctx->module = comp_ctx->module;
  1432. /* Add LLVM function */
  1433. if (!(func_ctx->func = aot_add_llvm_func(
  1434. comp_ctx, func_ctx->module, aot_func_type, func_index,
  1435. &func_ctx->func_type, &func_ctx->precheck_func))) {
  1436. goto fail;
  1437. }
  1438. /* Create function's first AOTBlock */
  1439. if (!(aot_block =
  1440. aot_create_func_block(comp_ctx, func_ctx, func, aot_func_type))) {
  1441. goto fail;
  1442. }
  1443. #if WASM_ENABLE_DEBUG_AOT != 0
  1444. func_ctx->debug_func = dwarf_gen_func_info(comp_ctx, func_ctx);
  1445. #endif
  1446. aot_block_stack_push(&func_ctx->block_stack, aot_block);
  1447. /* Add local variables */
  1448. LLVMPositionBuilderAtEnd(comp_ctx->builder, aot_block->llvm_entry_block);
  1449. if (!create_basic_func_context(comp_ctx, func_ctx)) {
  1450. goto fail;
  1451. }
  1452. /* Get argv buffer address */
  1453. if (wasm_func->has_op_func_call && !create_argv_buf(comp_ctx, func_ctx)) {
  1454. goto fail;
  1455. }
  1456. /* Get auxiliary stack info */
  1457. if (wasm_func->has_op_set_global_aux_stack
  1458. && !create_aux_stack_info(comp_ctx, func_ctx)) {
  1459. goto fail;
  1460. }
  1461. /* Create local variables */
  1462. if (!create_local_variables(comp_data, comp_ctx, func_ctx, func)) {
  1463. goto fail;
  1464. }
  1465. if (!(int8_ptr_type = LLVMPointerType(INT8_PTR_TYPE, 0))) {
  1466. aot_set_last_error("llvm add pointer type failed.");
  1467. goto fail;
  1468. }
  1469. /* Create base addr, end addr, data size of mem, heap */
  1470. if (wasm_func->has_memory_operations
  1471. && !create_memory_info(comp_ctx, func_ctx, int8_ptr_type, func_index)) {
  1472. goto fail;
  1473. }
  1474. /* Load current exception */
  1475. if (!create_cur_exception(comp_ctx, func_ctx)) {
  1476. goto fail;
  1477. }
  1478. /* Load function type indexes */
  1479. if (wasm_func->has_op_call_indirect
  1480. && !create_func_type_indexes(comp_ctx, func_ctx)) {
  1481. goto fail;
  1482. }
  1483. /* Load function pointers */
  1484. if (!create_func_ptrs(comp_ctx, func_ctx)) {
  1485. goto fail;
  1486. }
  1487. return func_ctx;
  1488. fail:
  1489. if (func_ctx->mem_info)
  1490. wasm_runtime_free(func_ctx->mem_info);
  1491. aot_block_stack_destroy(&func_ctx->block_stack);
  1492. wasm_runtime_free(func_ctx);
  1493. return NULL;
  1494. }
  1495. static void
  1496. aot_destroy_func_contexts(AOTFuncContext **func_ctxes, uint32 count)
  1497. {
  1498. uint32 i;
  1499. for (i = 0; i < count; i++)
  1500. if (func_ctxes[i]) {
  1501. if (func_ctxes[i]->mem_info)
  1502. wasm_runtime_free(func_ctxes[i]->mem_info);
  1503. aot_block_stack_destroy(&func_ctxes[i]->block_stack);
  1504. aot_checked_addr_list_destroy(func_ctxes[i]);
  1505. wasm_runtime_free(func_ctxes[i]);
  1506. }
  1507. wasm_runtime_free(func_ctxes);
  1508. }
  1509. /**
  1510. * Create function compiler contexts
  1511. */
  1512. static AOTFuncContext **
  1513. aot_create_func_contexts(const AOTCompData *comp_data, AOTCompContext *comp_ctx)
  1514. {
  1515. AOTFuncContext **func_ctxes;
  1516. uint64 size;
  1517. uint32 i;
  1518. if ((comp_ctx->enable_stack_bound_check
  1519. || comp_ctx->enable_stack_estimation)
  1520. && !aot_create_stack_sizes(comp_data, comp_ctx))
  1521. return NULL;
  1522. /* Allocate memory */
  1523. size = sizeof(AOTFuncContext *) * (uint64)comp_data->func_count;
  1524. if (size >= UINT32_MAX
  1525. || !(func_ctxes = wasm_runtime_malloc((uint32)size))) {
  1526. aot_set_last_error("allocate memory failed.");
  1527. return NULL;
  1528. }
  1529. memset(func_ctxes, 0, size);
  1530. /* Create each function context */
  1531. for (i = 0; i < comp_data->func_count; i++) {
  1532. AOTFunc *func = comp_data->funcs[i];
  1533. if (!(func_ctxes[i] =
  1534. aot_create_func_context(comp_data, comp_ctx, func, i))) {
  1535. aot_destroy_func_contexts(func_ctxes, comp_data->func_count);
  1536. return NULL;
  1537. }
  1538. }
  1539. return func_ctxes;
  1540. }
  1541. static bool
  1542. aot_set_llvm_basic_types(AOTLLVMTypes *basic_types, LLVMContextRef context)
  1543. {
  1544. basic_types->int1_type = LLVMInt1TypeInContext(context);
  1545. basic_types->int8_type = LLVMInt8TypeInContext(context);
  1546. basic_types->int16_type = LLVMInt16TypeInContext(context);
  1547. basic_types->int32_type = LLVMInt32TypeInContext(context);
  1548. basic_types->int64_type = LLVMInt64TypeInContext(context);
  1549. basic_types->float32_type = LLVMFloatTypeInContext(context);
  1550. basic_types->float64_type = LLVMDoubleTypeInContext(context);
  1551. basic_types->void_type = LLVMVoidTypeInContext(context);
  1552. basic_types->meta_data_type = LLVMMetadataTypeInContext(context);
  1553. basic_types->int8_ptr_type = LLVMPointerType(basic_types->int8_type, 0);
  1554. if (basic_types->int8_ptr_type) {
  1555. basic_types->int8_pptr_type =
  1556. LLVMPointerType(basic_types->int8_ptr_type, 0);
  1557. }
  1558. basic_types->int16_ptr_type = LLVMPointerType(basic_types->int16_type, 0);
  1559. basic_types->int32_ptr_type = LLVMPointerType(basic_types->int32_type, 0);
  1560. basic_types->int64_ptr_type = LLVMPointerType(basic_types->int64_type, 0);
  1561. basic_types->float32_ptr_type =
  1562. LLVMPointerType(basic_types->float32_type, 0);
  1563. basic_types->float64_ptr_type =
  1564. LLVMPointerType(basic_types->float64_type, 0);
  1565. basic_types->i8x16_vec_type = LLVMVectorType(basic_types->int8_type, 16);
  1566. basic_types->i16x8_vec_type = LLVMVectorType(basic_types->int16_type, 8);
  1567. basic_types->i32x4_vec_type = LLVMVectorType(basic_types->int32_type, 4);
  1568. basic_types->i64x2_vec_type = LLVMVectorType(basic_types->int64_type, 2);
  1569. basic_types->f32x4_vec_type = LLVMVectorType(basic_types->float32_type, 4);
  1570. basic_types->f64x2_vec_type = LLVMVectorType(basic_types->float64_type, 2);
  1571. basic_types->v128_type = basic_types->i64x2_vec_type;
  1572. basic_types->v128_ptr_type = LLVMPointerType(basic_types->v128_type, 0);
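    /* Pointer types in LLVM address space 256, which x86 targets lower to
       GS-segment-relative addressing; these back the optional segue fast
       path for linear memory accesses (see the segue_flags handling in
       aot_create_comp_context). */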
  1573. basic_types->int8_ptr_type_gs =
  1574. LLVMPointerType(basic_types->int8_type, 256);
  1575. basic_types->int16_ptr_type_gs =
  1576. LLVMPointerType(basic_types->int16_type, 256);
  1577. basic_types->int32_ptr_type_gs =
  1578. LLVMPointerType(basic_types->int32_type, 256);
  1579. basic_types->int64_ptr_type_gs =
  1580. LLVMPointerType(basic_types->int64_type, 256);
  1581. basic_types->float32_ptr_type_gs =
  1582. LLVMPointerType(basic_types->float32_type, 256);
  1583. basic_types->float64_ptr_type_gs =
  1584. LLVMPointerType(basic_types->float64_type, 256);
  1585. basic_types->v128_ptr_type_gs =
  1586. LLVMPointerType(basic_types->v128_type, 256);
  1587. if (!basic_types->int8_ptr_type_gs || !basic_types->int16_ptr_type_gs
  1588. || !basic_types->int32_ptr_type_gs || !basic_types->int64_ptr_type_gs
  1589. || !basic_types->float32_ptr_type_gs
  1590. || !basic_types->float64_ptr_type_gs
  1591. || !basic_types->v128_ptr_type_gs) {
  1592. return false;
  1593. }
  1594. basic_types->i1x2_vec_type = LLVMVectorType(basic_types->int1_type, 2);
  1595. basic_types->funcref_type = LLVMInt32TypeInContext(context);
  1596. basic_types->externref_type = LLVMInt32TypeInContext(context);
  1597. return (basic_types->int8_ptr_type && basic_types->int8_pptr_type
  1598. && basic_types->int16_ptr_type && basic_types->int32_ptr_type
  1599. && basic_types->int64_ptr_type && basic_types->float32_ptr_type
  1600. && basic_types->float64_ptr_type && basic_types->i8x16_vec_type
  1601. && basic_types->i16x8_vec_type && basic_types->i32x4_vec_type
  1602. && basic_types->i64x2_vec_type && basic_types->f32x4_vec_type
  1603. && basic_types->f64x2_vec_type && basic_types->i1x2_vec_type
  1604. && basic_types->meta_data_type && basic_types->funcref_type
  1605. && basic_types->externref_type)
  1606. ? true
  1607. : false;
  1608. }
  1609. static bool
  1610. aot_create_llvm_consts(AOTLLVMConsts *consts, AOTCompContext *comp_ctx)
  1611. {
  1612. #define CREATE_I1_CONST(name, value) \
  1613. if (!(consts->i1_##name = \
  1614. LLVMConstInt(comp_ctx->basic_types.int1_type, value, true))) \
  1615. return false;
  1616. CREATE_I1_CONST(zero, 0)
  1617. CREATE_I1_CONST(one, 1)
  1618. #undef CREATE_I1_CONST
  1619. if (!(consts->i8_zero = I8_CONST(0)))
  1620. return false;
  1621. if (!(consts->f32_zero = F32_CONST(0)))
  1622. return false;
  1623. if (!(consts->f64_zero = F64_CONST(0)))
  1624. return false;
  1625. #define CREATE_I32_CONST(name, value) \
  1626. if (!(consts->i32_##name = LLVMConstInt(I32_TYPE, value, true))) \
  1627. return false;
  1628. CREATE_I32_CONST(min, (uint32)INT32_MIN)
  1629. CREATE_I32_CONST(neg_one, (uint32)-1)
  1630. CREATE_I32_CONST(zero, 0)
  1631. CREATE_I32_CONST(one, 1)
  1632. CREATE_I32_CONST(two, 2)
  1633. CREATE_I32_CONST(three, 3)
  1634. CREATE_I32_CONST(four, 4)
  1635. CREATE_I32_CONST(five, 5)
  1636. CREATE_I32_CONST(six, 6)
  1637. CREATE_I32_CONST(seven, 7)
  1638. CREATE_I32_CONST(eight, 8)
  1639. CREATE_I32_CONST(nine, 9)
  1640. CREATE_I32_CONST(ten, 10)
  1641. CREATE_I32_CONST(eleven, 11)
  1642. CREATE_I32_CONST(twelve, 12)
  1643. CREATE_I32_CONST(thirteen, 13)
  1644. CREATE_I32_CONST(fourteen, 14)
  1645. CREATE_I32_CONST(fifteen, 15)
  1646. CREATE_I32_CONST(31, 31)
  1647. CREATE_I32_CONST(32, 32)
  1648. #undef CREATE_I32_CONST
  1649. #define CREATE_I64_CONST(name, value) \
  1650. if (!(consts->i64_##name = LLVMConstInt(I64_TYPE, value, true))) \
  1651. return false;
  1652. CREATE_I64_CONST(min, (uint64)INT64_MIN)
  1653. CREATE_I64_CONST(neg_one, (uint64)-1)
  1654. CREATE_I64_CONST(zero, 0)
  1655. CREATE_I64_CONST(63, 63)
  1656. CREATE_I64_CONST(64, 64)
  1657. #undef CREATE_I64_CONST
  1658. #define CREATE_V128_CONST(name, type) \
  1659. if (!(consts->name##_vec_zero = LLVMConstNull(type))) \
  1660. return false; \
  1661. if (!(consts->name##_undef = LLVMGetUndef(type))) \
  1662. return false;
  1663. CREATE_V128_CONST(i8x16, V128_i8x16_TYPE)
  1664. CREATE_V128_CONST(i16x8, V128_i16x8_TYPE)
  1665. CREATE_V128_CONST(i32x4, V128_i32x4_TYPE)
  1666. CREATE_V128_CONST(i64x2, V128_i64x2_TYPE)
  1667. CREATE_V128_CONST(f32x4, V128_f32x4_TYPE)
  1668. CREATE_V128_CONST(f64x2, V128_f64x2_TYPE)
  1669. #undef CREATE_V128_CONST
  1670. #define CREATE_VEC_ZERO_MASK(slot) \
  1671. { \
  1672. LLVMTypeRef type = LLVMVectorType(I32_TYPE, slot); \
  1673. if (!type || !(consts->i32x##slot##_zero = LLVMConstNull(type))) \
  1674. return false; \
  1675. }
  1676. CREATE_VEC_ZERO_MASK(16)
  1677. CREATE_VEC_ZERO_MASK(8)
  1678. CREATE_VEC_ZERO_MASK(4)
  1679. CREATE_VEC_ZERO_MASK(2)
  1680. #undef CREATE_VEC_ZERO_MASK
  1681. return true;
  1682. }
  1683. typedef struct ArchItem {
  1684. char *arch;
  1685. bool support_eb;
  1686. } ArchItem;
  1687. /* clang-format off */
  1688. static ArchItem valid_archs[] = {
  1689. { "x86_64", false },
  1690. { "i386", false },
  1691. { "xtensa", false },
  1692. { "mips", true },
  1693. { "mipsel", false },
  1694. { "aarch64v8", false },
  1695. { "aarch64v8.1", false },
  1696. { "aarch64v8.2", false },
  1697. { "aarch64v8.3", false },
  1698. { "aarch64v8.4", false },
  1699. { "aarch64v8.5", false },
  1700. { "aarch64_bev8", false }, /* big endian */
  1701. { "aarch64_bev8.1", false },
  1702. { "aarch64_bev8.2", false },
  1703. { "aarch64_bev8.3", false },
  1704. { "aarch64_bev8.4", false },
  1705. { "aarch64_bev8.5", false },
  1706. { "armv4", true },
  1707. { "armv4t", true },
  1708. { "armv5t", true },
  1709. { "armv5te", true },
  1710. { "armv5tej", true },
  1711. { "armv6", true },
  1712. { "armv6kz", true },
  1713. { "armv6t2", true },
  1714. { "armv6k", true },
  1715. { "armv7", true },
  1716. { "armv6m", true },
  1717. { "armv6sm", true },
  1718. { "armv7em", true },
  1719. { "armv8a", true },
  1720. { "armv8r", true },
  1721. { "armv8m.base", true },
  1722. { "armv8m.main", true },
  1723. { "armv8.1m.main", true },
  1724. { "thumbv4", true },
  1725. { "thumbv4t", true },
  1726. { "thumbv5t", true },
  1727. { "thumbv5te", true },
  1728. { "thumbv5tej", true },
  1729. { "thumbv6", true },
  1730. { "thumbv6kz", true },
  1731. { "thumbv6t2", true },
  1732. { "thumbv6k", true },
  1733. { "thumbv7", true },
  1734. { "thumbv6m", true },
  1735. { "thumbv6sm", true },
  1736. { "thumbv7em", true },
  1737. { "thumbv8a", true },
  1738. { "thumbv8r", true },
  1739. { "thumbv8m.base", true },
  1740. { "thumbv8m.main", true },
  1741. { "thumbv8.1m.main", true },
  1742. { "riscv32", true },
  1743. { "riscv64", true },
  1744. { "arc", true }
  1745. };
  1746. static const char *valid_abis[] = {
  1747. "gnu",
  1748. "eabi",
  1749. "eabihf",
  1750. "gnueabihf",
  1751. "msvc",
  1752. "ilp32",
  1753. "ilp32f",
  1754. "ilp32d",
  1755. "lp64",
  1756. "lp64f",
  1757. "lp64d"
  1758. };
  1759. /* clang-format on */
  1760. static void
  1761. print_supported_targets()
  1762. {
  1763. uint32 i;
  1764. const char *target_name;
  1765. os_printf("Supported targets:\n");
    /* Iterate over the list of all available targets */
  1767. for (LLVMTargetRef target = LLVMGetFirstTarget(); target != NULL;
  1768. target = LLVMGetNextTarget(target)) {
  1769. target_name = LLVMGetTargetName(target);
  1770. /* Skip mipsel, aarch64_be since prefix mips, aarch64 will cover them */
  1771. if (strcmp(target_name, "mipsel") == 0)
  1772. continue;
  1773. else if (strcmp(target_name, "aarch64_be") == 0)
  1774. continue;
  1775. if (strcmp(target_name, "x86-64") == 0)
  1776. os_printf(" x86_64\n");
  1777. else if (strcmp(target_name, "x86") == 0)
  1778. os_printf(" i386\n");
  1779. else {
  1780. for (i = 0; i < sizeof(valid_archs) / sizeof(ArchItem); i++) {
  1781. /* If target_name is prefix for valid_archs[i].arch */
  1782. if ((strncmp(target_name, valid_archs[i].arch,
  1783. strlen(target_name))
  1784. == 0))
  1785. os_printf(" %s\n", valid_archs[i].arch);
  1786. }
  1787. }
  1788. }
  1789. }
  1790. static void
  1791. print_supported_abis()
  1792. {
  1793. uint32 i;
  1794. os_printf("Supported ABI: ");
  1795. for (i = 0; i < sizeof(valid_abis) / sizeof(const char *); i++)
  1796. os_printf("%s ", valid_abis[i]);
  1797. os_printf("\n");
  1798. }
  1799. static bool
  1800. check_target_arch(const char *target_arch)
  1801. {
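    /* An arch string matches when it equals a known arch name or, for archs
       flagged with support_eb, the arch name followed by the big-endian
       suffix "eb" (e.g. "armv7eb"). */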
  1802. uint32 i;
  1803. char *arch;
  1804. bool support_eb;
  1805. for (i = 0; i < sizeof(valid_archs) / sizeof(ArchItem); i++) {
  1806. arch = valid_archs[i].arch;
  1807. support_eb = valid_archs[i].support_eb;
  1808. if (!strncmp(target_arch, arch, strlen(arch))
  1809. && ((support_eb
  1810. && (!strcmp(target_arch + strlen(arch), "eb")
  1811. || !strcmp(target_arch + strlen(arch), "")))
  1812. || (!support_eb && !strcmp(target_arch + strlen(arch), "")))) {
  1813. return true;
  1814. }
  1815. }
  1816. return false;
  1817. }
  1818. static bool
  1819. check_target_abi(const char *target_abi)
  1820. {
  1821. uint32 i;
  1822. for (i = 0; i < sizeof(valid_abis) / sizeof(char *); i++) {
  1823. if (!strcmp(target_abi, valid_abis[i]))
  1824. return true;
  1825. }
  1826. return false;
  1827. }
  1828. static void
  1829. get_target_arch_from_triple(const char *triple, char *arch_buf, uint32 buf_size)
  1830. {
  1831. uint32 i = 0;
  1832. while (*triple != '-' && *triple != '\0' && i < buf_size - 1)
  1833. arch_buf[i++] = *triple++;
  1834. /* Make sure buffer is long enough */
  1835. bh_assert(*triple == '-' || *triple == '\0');
  1836. }
  1837. static bool
  1838. is_baremetal_target(const char *target, const char *cpu, const char *abi)
  1839. {
  1840. /* TODO: support more baremetal targets */
  1841. if (target) {
  1842. /* If target is thumbxxx, then it is baremetal target */
  1843. if (!strncmp(target, "thumb", strlen("thumb")))
  1844. return true;
  1845. }
  1846. return false;
  1847. }
  1848. void
  1849. aot_handle_llvm_errmsg(const char *string, LLVMErrorRef err)
  1850. {
  1851. char *err_msg = LLVMGetErrorMessage(err);
  1852. aot_set_last_error_v("%s: %s", string, err_msg);
  1853. LLVMDisposeErrorMessage(err_msg);
  1854. }
  1855. static bool
  1856. create_target_machine_detect_host(AOTCompContext *comp_ctx)
  1857. {
  1858. char *triple = NULL;
  1859. LLVMTargetRef target = NULL;
  1860. char *err_msg = NULL;
  1861. char *cpu = NULL;
  1862. char *features = NULL;
  1863. LLVMTargetMachineRef target_machine = NULL;
  1864. bool ret = false;
  1865. triple = LLVMGetDefaultTargetTriple();
  1866. if (triple == NULL) {
  1867. aot_set_last_error("failed to get default target triple.");
  1868. goto fail;
  1869. }
  1870. if (LLVMGetTargetFromTriple(triple, &target, &err_msg) != 0) {
  1871. aot_set_last_error_v("failed to get llvm target from triple %s.",
  1872. err_msg);
  1873. LLVMDisposeMessage(err_msg);
  1874. goto fail;
  1875. }
  1876. if (!LLVMTargetHasJIT(target)) {
        aot_set_last_error("unsupported JIT on this platform.");
  1878. goto fail;
  1879. }
  1880. cpu = LLVMGetHostCPUName();
  1881. if (cpu == NULL) {
  1882. aot_set_last_error("failed to get host cpu information.");
  1883. goto fail;
  1884. }
  1885. features = LLVMGetHostCPUFeatures();
  1886. if (features == NULL) {
  1887. aot_set_last_error("failed to get host cpu features.");
  1888. goto fail;
  1889. }
  1890. LOG_VERBOSE("LLVM ORCJIT detected CPU \"%s\", with features \"%s\"\n", cpu,
  1891. features);
  1892. /* create TargetMachine */
  1893. target_machine = LLVMCreateTargetMachine(
  1894. target, triple, cpu, features, LLVMCodeGenLevelDefault,
  1895. LLVMRelocDefault, LLVMCodeModelJITDefault);
  1896. if (!target_machine) {
  1897. aot_set_last_error("failed to create target machine.");
  1898. goto fail;
  1899. }
  1900. comp_ctx->target_machine = target_machine;
  1901. /* Save target arch */
  1902. get_target_arch_from_triple(triple, comp_ctx->target_arch,
  1903. sizeof(comp_ctx->target_arch));
  1904. ret = true;
  1905. fail:
  1906. if (triple)
  1907. LLVMDisposeMessage(triple);
  1908. if (features)
  1909. LLVMDisposeMessage(features);
  1910. if (cpu)
  1911. LLVMDisposeMessage(cpu);
  1912. return ret;
  1913. }
  1914. static void
  1915. jit_stack_size_callback(void *user_data, const char *name, size_t namelen,
  1916. size_t stack_size)
  1917. {
  1918. AOTCompContext *comp_ctx = user_data;
  1919. /*
     * Note: the longest name we care about is
  1921. * something like "aot_func_internal#4294967295".
  1922. */
  1923. char buf[64];
  1924. uint32 func_idx;
  1925. const AOTFuncContext *func_ctx;
  1926. bool musttail;
  1927. unsigned int stack_consumption_to_call_wrapped_func;
  1928. unsigned int call_size;
  1929. int ret;
  1930. bh_assert(comp_ctx != NULL);
  1931. bh_assert(comp_ctx->jit_stack_sizes != NULL);
  1932. if (namelen >= sizeof(buf)) {
  1933. LOG_DEBUG("too long name: %.*s", (int)namelen, name);
  1934. return;
  1935. }
  1936. /* ensure NUL termination */
  1937. bh_memcpy_s(buf, (uint32)sizeof(buf), name, (uint32)namelen);
  1938. buf[namelen] = 0;
  1939. ret = sscanf(buf, AOT_FUNC_INTERNAL_PREFIX "%" SCNu32, &func_idx);
  1940. if (ret != 1) {
  1941. return;
  1942. }
  1943. bh_assert(func_idx < comp_ctx->func_ctx_count);
  1944. func_ctx = comp_ctx->func_ctxes[func_idx];
  1945. call_size = func_ctx->stack_consumption_for_func_call;
  1946. musttail = aot_target_precheck_can_use_musttail(comp_ctx);
  1947. stack_consumption_to_call_wrapped_func =
  1948. musttail ? 0
  1949. : aot_estimate_stack_usage_for_function_call(
  1950. comp_ctx, func_ctx->aot_func->func_type);
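    /* stack_consumption_to_call_wrapped_func is presumably the extra stack
       the precheck wrapper needs to make a (non-tail) call into this
       function; it is zero when musttail lets the wrapper reuse its frame.
       Here it is only logged; the recorded size is the JIT-reported frame
       size plus the function's own outgoing-call consumption. */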
  1951. LOG_VERBOSE("func %.*s stack %u + %zu + %u", (int)namelen, name,
  1952. stack_consumption_to_call_wrapped_func, stack_size, call_size);
    /* Note: -1 matches the I32_NEG_ONE placeholder set in aot_create_stack_sizes */
  1954. bh_assert(comp_ctx->jit_stack_sizes[func_idx] == (uint32)-1);
  1955. comp_ctx->jit_stack_sizes[func_idx] = (uint32)stack_size + call_size;
  1956. }
  1957. static bool
  1958. orc_jit_create(AOTCompContext *comp_ctx)
  1959. {
  1960. LLVMErrorRef err;
  1961. LLVMOrcLLLazyJITRef orc_jit = NULL;
  1962. LLVMOrcLLLazyJITBuilderRef builder = NULL;
  1963. LLVMOrcJITTargetMachineBuilderRef jtmb = NULL;
  1964. bool ret = false;
  1965. builder = LLVMOrcCreateLLLazyJITBuilder();
  1966. if (builder == NULL) {
  1967. aot_set_last_error("failed to create jit builder.");
  1968. goto fail;
  1969. }
  1970. if (comp_ctx->enable_stack_bound_check || comp_ctx->enable_stack_estimation)
  1971. LLVMOrcLLJITBuilderSetCompileFuncitonCreatorWithStackSizesCallback(
  1972. builder, jit_stack_size_callback, comp_ctx);
  1973. err = LLVMOrcJITTargetMachineBuilderDetectHost(&jtmb);
  1974. if (err != LLVMErrorSuccess) {
  1975. aot_handle_llvm_errmsg(
            "failed to create LLVMOrcJITTargetMachineBuilderRef", err);
  1977. goto fail;
  1978. }
  1979. LLVMOrcLLLazyJITBuilderSetNumCompileThreads(
  1980. builder, WASM_ORC_JIT_COMPILE_THREAD_NUM);
  1981. /* Ownership transfer:
  1982. LLVMOrcJITTargetMachineBuilderRef -> LLVMOrcLLJITBuilderRef */
  1983. LLVMOrcLLLazyJITBuilderSetJITTargetMachineBuilder(builder, jtmb);
  1984. err = LLVMOrcCreateLLLazyJIT(&orc_jit, builder);
  1985. if (err != LLVMErrorSuccess) {
        aot_handle_llvm_errmsg("failed to create llvm lazy orcjit instance",
  1987. err);
  1988. goto fail;
  1989. }
  1990. /* Ownership transfer: LLVMOrcLLJITBuilderRef -> LLVMOrcLLJITRef */
  1991. builder = NULL;
  1992. #if WASM_ENABLE_LINUX_PERF != 0
  1993. if (wasm_runtime_get_linux_perf()) {
  1994. LOG_DEBUG("Enable linux perf support in JIT");
  1995. LLVMOrcObjectLayerRef obj_linking_layer =
  1996. (LLVMOrcObjectLayerRef)LLVMOrcLLLazyJITGetObjLinkingLayer(orc_jit);
  1997. LLVMOrcRTDyldObjectLinkingLayerRegisterJITEventListener(
  1998. obj_linking_layer, LLVMCreatePerfJITEventListener());
  1999. }
  2000. #endif
  2001. /* Ownership transfer: local -> AOTCompContext */
  2002. comp_ctx->orc_jit = orc_jit;
  2003. orc_jit = NULL;
  2004. ret = true;
  2005. fail:
  2006. if (builder)
  2007. LLVMOrcDisposeLLLazyJITBuilder(builder);
  2008. if (orc_jit)
  2009. LLVMOrcDisposeLLLazyJIT(orc_jit);
  2010. return ret;
  2011. }
  2012. bool
  2013. aot_compiler_init(void)
  2014. {
  2015. /* Initialize LLVM environment */
  2016. #if LLVM_VERSION_MAJOR < 17
  2017. LLVMInitializeCore(LLVMGetGlobalPassRegistry());
  2018. #endif
  2019. #if WASM_ENABLE_WAMR_COMPILER != 0
  2020. /* Init environment of all targets for AOT compiler */
  2021. LLVMInitializeAllTargetInfos();
  2022. LLVMInitializeAllTargets();
  2023. LLVMInitializeAllTargetMCs();
  2024. LLVMInitializeAllAsmPrinters();
  2025. #else
  2026. /* Init environment of native for JIT compiler */
    LLVMInitializeNativeTarget();
  2029. LLVMInitializeNativeAsmPrinter();
  2030. #endif
  2031. return true;
  2032. }
  2033. void
  2034. aot_compiler_destroy(void)
  2035. {
  2036. LLVMShutdown();
  2037. }
  2038. AOTCompContext *
  2039. aot_create_comp_context(const AOTCompData *comp_data, aot_comp_option_t option)
  2040. {
  2041. AOTCompContext *comp_ctx, *ret = NULL;
  2042. LLVMTargetRef target;
  2043. char *triple = NULL, *triple_norm, *arch, *abi;
  2044. char *cpu = NULL, *features, buf[128];
  2045. char *triple_norm_new = NULL, *cpu_new = NULL;
  2046. char *err = NULL, *fp_round = "round.tonearest",
  2047. *fp_exce = "fpexcept.strict";
  2048. char triple_buf[128] = { 0 }, features_buf[128] = { 0 };
  2049. uint32 opt_level, size_level, i;
  2050. LLVMCodeModel code_model;
  2051. LLVMTargetDataRef target_data_ref;
  2052. /* Allocate memory */
  2053. if (!(comp_ctx = wasm_runtime_malloc(sizeof(AOTCompContext)))) {
  2054. aot_set_last_error("allocate memory failed.");
  2055. return NULL;
  2056. }
  2057. memset(comp_ctx, 0, sizeof(AOTCompContext));
  2058. comp_ctx->comp_data = comp_data;
  2059. /* Create LLVM context, module and builder */
  2060. comp_ctx->orc_thread_safe_context = LLVMOrcCreateNewThreadSafeContext();
  2061. if (!comp_ctx->orc_thread_safe_context) {
  2062. aot_set_last_error("create LLVM ThreadSafeContext failed.");
  2063. goto fail;
  2064. }
  2065. /* Get a reference to the underlying LLVMContext, note:
  2066. different from non LAZY JIT mode, no need to dispose this context,
       it will be disposed when the thread safe context is disposed */
  2068. if (!(comp_ctx->context = LLVMOrcThreadSafeContextGetContext(
  2069. comp_ctx->orc_thread_safe_context))) {
  2070. aot_set_last_error("get context from LLVM ThreadSafeContext failed.");
  2071. goto fail;
  2072. }
  2073. if (!(comp_ctx->builder = LLVMCreateBuilderInContext(comp_ctx->context))) {
  2074. aot_set_last_error("create LLVM builder failed.");
  2075. goto fail;
  2076. }
  2077. /* Create LLVM module for each jit function, note:
  2078. different from non ORC JIT mode, no need to dispose it,
  2079. it will be disposed when the thread safe context is disposed */
  2080. if (!(comp_ctx->module = LLVMModuleCreateWithNameInContext(
  2081. "WASM Module", comp_ctx->context))) {
  2082. aot_set_last_error("create LLVM module failed.");
  2083. goto fail;
  2084. }
  2085. #if WASM_ENABLE_LINUX_PERF != 0
  2086. if (wasm_runtime_get_linux_perf()) {
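        /* Keeping frame pointers (FramePointerKind::All) helps perf unwind
           and attribute samples in the generated code. */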
  2087. /* FramePointerKind.All */
  2088. LLVMMetadataRef val =
  2089. LLVMValueAsMetadata(LLVMConstInt(LLVMInt32Type(), 2, false));
  2090. const char *key = "frame-pointer";
  2091. LLVMAddModuleFlag(comp_ctx->module, LLVMModuleFlagBehaviorWarning, key,
  2092. strlen(key), val);
  2093. comp_ctx->emit_frame_pointer = true;
  2094. }
  2095. #endif
  2096. if (BH_LIST_ERROR == bh_list_init(&comp_ctx->native_symbols)) {
  2097. goto fail;
  2098. }
  2099. #if WASM_ENABLE_DEBUG_AOT != 0
  2100. if (!(comp_ctx->debug_builder = LLVMCreateDIBuilder(comp_ctx->module))) {
        aot_set_last_error("create LLVM Debug Info builder failed.");
  2102. goto fail;
  2103. }
  2104. LLVMAddModuleFlag(
  2105. comp_ctx->module, LLVMModuleFlagBehaviorWarning, "Debug Info Version",
  2106. strlen("Debug Info Version"),
  2107. LLVMValueAsMetadata(LLVMConstInt(LLVMInt32Type(), 3, false)));
  2108. comp_ctx->debug_file = dwarf_gen_file_info(comp_ctx);
  2109. if (!comp_ctx->debug_file) {
  2110. aot_set_last_error("dwarf generate file info failed");
  2111. goto fail;
  2112. }
  2113. comp_ctx->debug_comp_unit = dwarf_gen_comp_unit_info(comp_ctx);
  2114. if (!comp_ctx->debug_comp_unit) {
  2115. aot_set_last_error("dwarf generate compile unit info failed");
  2116. goto fail;
  2117. }
  2118. #endif
  2119. if (option->enable_bulk_memory)
  2120. comp_ctx->enable_bulk_memory = true;
  2121. if (option->enable_thread_mgr)
  2122. comp_ctx->enable_thread_mgr = true;
  2123. if (option->enable_tail_call)
  2124. comp_ctx->enable_tail_call = true;
  2125. if (option->enable_ref_types)
  2126. comp_ctx->enable_ref_types = true;
  2127. if (option->enable_aux_stack_frame)
  2128. comp_ctx->enable_aux_stack_frame = true;
  2129. if (option->enable_aux_stack_check)
  2130. comp_ctx->enable_aux_stack_check = true;
  2131. if (option->is_indirect_mode)
  2132. comp_ctx->is_indirect_mode = true;
  2133. if (option->disable_llvm_intrinsics)
  2134. comp_ctx->disable_llvm_intrinsics = true;
  2135. if (option->disable_llvm_lto)
  2136. comp_ctx->disable_llvm_lto = true;
  2137. if (option->enable_llvm_pgo)
  2138. comp_ctx->enable_llvm_pgo = true;
  2139. if (option->use_prof_file)
  2140. comp_ctx->use_prof_file = option->use_prof_file;
  2141. if (option->enable_stack_estimation)
  2142. comp_ctx->enable_stack_estimation = true;
  2143. if (option->quick_invoke_c_api_import)
  2144. comp_ctx->quick_invoke_c_api_import = true;
  2145. if (option->llvm_passes)
  2146. comp_ctx->llvm_passes = option->llvm_passes;
  2147. if (option->builtin_intrinsics)
  2148. comp_ctx->builtin_intrinsics = option->builtin_intrinsics;
  2149. comp_ctx->opt_level = option->opt_level;
  2150. comp_ctx->size_level = option->size_level;
  2151. comp_ctx->custom_sections_wp = option->custom_sections;
  2152. comp_ctx->custom_sections_count = option->custom_sections_count;
  2153. if (option->is_jit_mode) {
  2154. comp_ctx->is_jit_mode = true;
  2155. #ifndef OS_ENABLE_HW_BOUND_CHECK
  2156. comp_ctx->enable_bound_check = true;
  2157. /* Always enable stack boundary check if `bounds-checks`
  2158. is enabled */
  2159. comp_ctx->enable_stack_bound_check = true;
  2160. #else
  2161. comp_ctx->enable_bound_check = false;
  2162. /* When `bounds-checks` is disabled, we set stack boundary
  2163. check status according to the compilation option */
  2164. #if WASM_DISABLE_STACK_HW_BOUND_CHECK != 0
  2165. /* Native stack overflow check with hardware trap is disabled,
  2166. we need to enable the check by LLVM JITed/AOTed code */
  2167. comp_ctx->enable_stack_bound_check = true;
  2168. #else
  2169. /* Native stack overflow check with hardware trap is enabled,
  2170. no need to enable the check by LLVM JITed/AOTed code */
  2171. comp_ctx->enable_stack_bound_check = false;
  2172. #endif
  2173. #endif
  2174. /* Create TargetMachine */
  2175. if (!create_target_machine_detect_host(comp_ctx))
  2176. goto fail;
  2177. /* Create LLJIT Instance */
  2178. if (!orc_jit_create(comp_ctx))
  2179. goto fail;
  2180. }
  2181. else {
  2182. /* Create LLVM target machine */
  2183. arch = option->target_arch;
  2184. abi = option->target_abi;
  2185. cpu = option->target_cpu;
  2186. features = option->cpu_features;
  2187. opt_level = option->opt_level;
  2188. size_level = option->size_level;
  2189. /* verify external llc compiler */
  2190. comp_ctx->external_llc_compiler = getenv("WAMRC_LLC_COMPILER");
  2191. if (comp_ctx->external_llc_compiler) {
  2192. #if defined(_WIN32) || defined(_WIN32_)
  2193. comp_ctx->external_llc_compiler = NULL;
  2194. LOG_WARNING("External LLC compiler not supported on Windows.");
  2195. #else
  2196. if (access(comp_ctx->external_llc_compiler, X_OK) != 0) {
  2197. LOG_WARNING("WAMRC_LLC_COMPILER [%s] not found, fallback to "
  2198. "default pipeline",
  2199. comp_ctx->external_llc_compiler);
  2200. comp_ctx->external_llc_compiler = NULL;
  2201. }
  2202. else {
  2203. comp_ctx->llc_compiler_flags = getenv("WAMRC_LLC_FLAGS");
  2204. LOG_VERBOSE("Using external LLC compiler [%s]",
  2205. comp_ctx->external_llc_compiler);
  2206. }
  2207. #endif
  2208. }
  2209. /* verify external asm compiler */
  2210. if (!comp_ctx->external_llc_compiler) {
  2211. comp_ctx->external_asm_compiler = getenv("WAMRC_ASM_COMPILER");
  2212. if (comp_ctx->external_asm_compiler) {
  2213. #if defined(_WIN32) || defined(_WIN32_)
  2214. comp_ctx->external_asm_compiler = NULL;
  2215. LOG_WARNING("External ASM compiler not supported on Windows.");
  2216. #else
  2217. if (access(comp_ctx->external_asm_compiler, X_OK) != 0) {
  2218. LOG_WARNING(
  2219. "WAMRC_ASM_COMPILER [%s] not found, fallback to "
  2220. "default pipeline",
  2221. comp_ctx->external_asm_compiler);
  2222. comp_ctx->external_asm_compiler = NULL;
  2223. }
  2224. else {
  2225. comp_ctx->asm_compiler_flags = getenv("WAMRC_ASM_FLAGS");
  2226. LOG_VERBOSE("Using external ASM compiler [%s]",
  2227. comp_ctx->external_asm_compiler);
  2228. }
  2229. #endif
  2230. }
  2231. }
  2232. if (arch) {
  2233. /* Add default sub-arch if not specified */
  2234. if (!strcmp(arch, "arm"))
  2235. arch = "armv4";
  2236. else if (!strcmp(arch, "armeb"))
  2237. arch = "armv4eb";
  2238. else if (!strcmp(arch, "thumb"))
  2239. arch = "thumbv4t";
  2240. else if (!strcmp(arch, "thumbeb"))
  2241. arch = "thumbv4teb";
  2242. else if (!strcmp(arch, "aarch64"))
  2243. arch = "aarch64v8";
  2244. else if (!strcmp(arch, "aarch64_be"))
  2245. arch = "aarch64_bev8";
  2246. }
  2247. /* Check target arch */
  2248. if (arch && !check_target_arch(arch)) {
  2249. if (!strcmp(arch, "help"))
  2250. print_supported_targets();
  2251. else
  2252. aot_set_last_error(
  2253. "Invalid target. "
  2254. "Use --target=help to list all supported targets");
  2255. goto fail;
  2256. }
  2257. /* Check target ABI */
  2258. if (abi && !check_target_abi(abi)) {
  2259. if (!strcmp(abi, "help"))
  2260. print_supported_abis();
  2261. else
  2262. aot_set_last_error(
  2263. "Invalid target ABI. "
  2264. "Use --target-abi=help to list all supported ABI");
  2265. goto fail;
  2266. }
  2267. /* Set default abi for riscv target */
  2268. if (arch && !strncmp(arch, "riscv", 5) && !abi) {
  2269. if (!strcmp(arch, "riscv64"))
  2270. abi = "lp64d";
  2271. else
  2272. abi = "ilp32d";
  2273. }
  2274. #if defined(__APPLE__) || defined(__MACH__)
  2275. if (!abi) {
            /* On macOS, set the abi to "gnu" to avoid generating an object
               file in the Mach-O binary format, which is unsupported */
  2278. abi = "gnu";
  2279. if (!arch && !cpu && !features) {
                /* Get the CPU name of the host machine so that the SIMD
                   capability check does not fail */
  2282. if (!(cpu = cpu_new = LLVMGetHostCPUName())) {
  2283. aot_set_last_error("llvm get host cpu name failed.");
  2284. goto fail;
  2285. }
  2286. }
  2287. }
  2288. #endif
  2289. if (abi) {
  2290. /* Construct target triple: <arch>-<vendor>-<sys>-<abi> */
  2291. const char *vendor_sys;
  2292. char *arch1 = arch, default_arch[32] = { 0 };
  2293. if (!arch1) {
  2294. char *default_triple = LLVMGetDefaultTargetTriple();
  2295. if (!default_triple) {
  2296. aot_set_last_error(
  2297. "llvm get default target triple failed.");
  2298. goto fail;
  2299. }
  2300. vendor_sys = strstr(default_triple, "-");
  2301. bh_assert(vendor_sys);
  2302. bh_memcpy_s(default_arch, sizeof(default_arch), default_triple,
  2303. (uint32)(vendor_sys - default_triple));
  2304. arch1 = default_arch;
  2305. LLVMDisposeMessage(default_triple);
  2306. }
  2307. /**
  2308. * Set <vendor>-<sys> according to abi to generate the object file
  2309. * with the correct file format which might be different from the
  2310. * default object file format of the host, e.g., generating AOT file
  2311. * for Windows/MacOS under Linux host, or generating AOT file for
  2312. * Linux/MacOS under Windows host.
  2313. */
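            /* For example, arch "riscv64" with abi "lp64d" yields the triple
               "riscv64-pc-linux-lp64d" here, which is later normalized by
               LLVMNormalizeTargetTriple(). */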
  2314. if (!strcmp(abi, "msvc")) {
  2315. if (!strcmp(arch1, "i386"))
  2316. vendor_sys = "-pc-win32-";
  2317. else
  2318. vendor_sys = "-pc-windows-";
  2319. }
  2320. else {
  2321. if (is_baremetal_target(arch, cpu, abi))
  2322. vendor_sys = "-unknown-none-";
  2323. else
  2324. vendor_sys = "-pc-linux-";
  2325. }
  2326. bh_assert(strlen(arch1) + strlen(vendor_sys) + strlen(abi)
  2327. < sizeof(triple_buf));
  2328. bh_memcpy_s(triple_buf, (uint32)sizeof(triple_buf), arch1,
  2329. (uint32)strlen(arch1));
  2330. bh_memcpy_s(triple_buf + strlen(arch1),
  2331. (uint32)(sizeof(triple_buf) - strlen(arch1)),
  2332. vendor_sys, (uint32)strlen(vendor_sys));
  2333. bh_memcpy_s(triple_buf + strlen(arch1) + strlen(vendor_sys),
  2334. (uint32)(sizeof(triple_buf) - strlen(arch1)
  2335. - strlen(vendor_sys)),
  2336. abi, (uint32)strlen(abi));
  2337. triple = triple_buf;
  2338. }
  2339. else if (arch) {
  2340. /* Construct target triple: <arch>-<vendor>-<sys>-<abi> */
  2341. const char *vendor_sys;
  2342. char *default_triple = LLVMGetDefaultTargetTriple();
  2343. if (!default_triple) {
  2344. aot_set_last_error("llvm get default target triple failed.");
  2345. goto fail;
  2346. }
  2347. if (strstr(default_triple, "windows")) {
  2348. vendor_sys = "-pc-windows-";
  2349. if (!abi)
  2350. abi = "msvc";
  2351. }
  2352. else if (strstr(default_triple, "win32")) {
  2353. vendor_sys = "-pc-win32-";
  2354. if (!abi)
  2355. abi = "msvc";
  2356. }
  2357. else if (is_baremetal_target(arch, cpu, abi)) {
  2358. vendor_sys = "-unknown-none-";
  2359. if (!abi)
  2360. abi = "gnu";
  2361. }
  2362. else {
  2363. vendor_sys = "-pc-linux-";
  2364. if (!abi)
  2365. abi = "gnu";
  2366. }
  2367. LLVMDisposeMessage(default_triple);
  2368. bh_assert(strlen(arch) + strlen(vendor_sys) + strlen(abi)
  2369. < sizeof(triple_buf));
  2370. bh_memcpy_s(triple_buf, (uint32)sizeof(triple_buf), arch,
  2371. (uint32)strlen(arch));
  2372. bh_memcpy_s(triple_buf + strlen(arch),
  2373. (uint32)(sizeof(triple_buf) - strlen(arch)), vendor_sys,
  2374. (uint32)strlen(vendor_sys));
  2375. bh_memcpy_s(triple_buf + strlen(arch) + strlen(vendor_sys),
  2376. (uint32)(sizeof(triple_buf) - strlen(arch)
  2377. - strlen(vendor_sys)),
  2378. abi, (uint32)strlen(abi));
  2379. triple = triple_buf;
  2380. }
  2381. if (!cpu && features) {
  2382. aot_set_last_error("cpu isn't specified for cpu features.");
  2383. goto fail;
  2384. }
  2385. if (!triple && !cpu) {
  2386. /* Get a triple for the host machine */
  2387. if (!(triple_norm = triple_norm_new =
  2388. LLVMGetDefaultTargetTriple())) {
  2389. aot_set_last_error("llvm get default target triple failed.");
  2390. goto fail;
  2391. }
  2392. /* Get CPU name of the host machine */
  2393. if (!(cpu = cpu_new = LLVMGetHostCPUName())) {
  2394. aot_set_last_error("llvm get host cpu name failed.");
  2395. goto fail;
  2396. }
  2397. }
  2398. else if (triple) {
  2399. /* Normalize a target triple */
  2400. if (!(triple_norm = triple_norm_new =
  2401. LLVMNormalizeTargetTriple(triple))) {
  2402. snprintf(buf, sizeof(buf),
                         "llvm normalize target triple (%s) failed.", triple);
  2404. aot_set_last_error(buf);
  2405. goto fail;
  2406. }
  2407. if (!cpu)
  2408. cpu = "";
  2409. }
  2410. else {
  2411. /* triple is NULL, cpu isn't NULL */
  2412. snprintf(buf, sizeof(buf), "target isn't specified for cpu %s.",
  2413. cpu);
  2414. aot_set_last_error(buf);
  2415. goto fail;
  2416. }
  2417. /* Add module flag and cpu feature for riscv target */
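        /* The "target-abi" module flag tells the RISC-V backend which float
           calling convention to emit, and "+d" enables the D (double-precision
           floating point) extension that the lp64d/ilp32d ABIs require. */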
  2418. if (arch && !strncmp(arch, "riscv", 5)) {
  2419. LLVMMetadataRef meta_target_abi;
  2420. if (!(meta_target_abi = LLVMMDStringInContext2(comp_ctx->context,
  2421. abi, strlen(abi)))) {
  2422. aot_set_last_error("create metadata string failed.");
  2423. goto fail;
  2424. }
  2425. LLVMAddModuleFlag(comp_ctx->module, LLVMModuleFlagBehaviorError,
  2426. "target-abi", strlen("target-abi"),
  2427. meta_target_abi);
  2428. if (!strcmp(abi, "lp64d") || !strcmp(abi, "ilp32d")) {
  2429. if (features && !strstr(features, "+d")) {
  2430. snprintf(features_buf, sizeof(features_buf), "%s%s",
  2431. features, ",+d");
  2432. features = features_buf;
  2433. }
  2434. else if (!features) {
  2435. features = "+d";
  2436. }
  2437. }
  2438. }
  2439. if (!features)
  2440. features = "";
        /* Get target with triple, note that LLVMGetTargetFromTriple()
           returns 0 on success, not true. */
  2443. if (LLVMGetTargetFromTriple(triple_norm, &target, &err) != 0) {
  2444. if (err) {
  2445. LLVMDisposeMessage(err);
  2446. err = NULL;
  2447. }
  2448. snprintf(buf, sizeof(buf),
  2449. "llvm get target from triple (%s) failed", triple_norm);
  2450. aot_set_last_error(buf);
  2451. goto fail;
  2452. }
  2453. /* Save target arch */
  2454. get_target_arch_from_triple(triple_norm, comp_ctx->target_arch,
  2455. sizeof(comp_ctx->target_arch));
  2456. if (option->bounds_checks == 1 || option->bounds_checks == 0) {
  2457. /* Set by user */
  2458. comp_ctx->enable_bound_check =
  2459. (option->bounds_checks == 1) ? true : false;
  2460. }
  2461. else {
  2462. /* Unset by user, use default value */
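        /* On 64-bit targets outside SGX the runtime can typically rely on
           the hardware (guard-page) based bounds check, so the software
           check defaults to off; 32-bit and SGX builds default to on. */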
  2463. if (strstr(comp_ctx->target_arch, "64")
  2464. && !option->is_sgx_platform) {
  2465. comp_ctx->enable_bound_check = false;
  2466. }
  2467. else {
  2468. comp_ctx->enable_bound_check = true;
  2469. }
  2470. }
  2471. if (comp_ctx->enable_bound_check) {
  2472. /* Always enable stack boundary check if `bounds-checks`
  2473. is enabled */
  2474. comp_ctx->enable_stack_bound_check = true;
  2475. }
  2476. else {
  2477. /* When `bounds-checks` is disabled, we set stack boundary
  2478. check status according to the input option */
  2479. comp_ctx->enable_stack_bound_check =
  2480. (option->stack_bounds_checks == 1) ? true : false;
  2481. }
  2482. if ((comp_ctx->enable_stack_bound_check
  2483. || comp_ctx->enable_stack_estimation)
  2484. && option->stack_usage_file == NULL) {
  2485. if (!aot_generate_tempfile_name(
  2486. "wamrc-su", "su", comp_ctx->stack_usage_temp_file,
  2487. sizeof(comp_ctx->stack_usage_temp_file)))
  2488. goto fail;
  2489. comp_ctx->stack_usage_file = comp_ctx->stack_usage_temp_file;
  2490. }
  2491. else {
  2492. comp_ctx->stack_usage_file = option->stack_usage_file;
  2493. }
  2494. os_printf("Create AoT compiler with:\n");
  2495. os_printf(" target: %s\n", comp_ctx->target_arch);
  2496. os_printf(" target cpu: %s\n", cpu);
  2497. os_printf(" target triple: %s\n", triple_norm);
  2498. os_printf(" cpu features: %s\n", features);
  2499. os_printf(" opt level: %d\n", opt_level);
  2500. os_printf(" size level: %d\n", size_level);
  2501. switch (option->output_format) {
  2502. case AOT_LLVMIR_UNOPT_FILE:
  2503. os_printf(" output format: unoptimized LLVM IR\n");
  2504. break;
  2505. case AOT_LLVMIR_OPT_FILE:
  2506. os_printf(" output format: optimized LLVM IR\n");
  2507. break;
  2508. case AOT_FORMAT_FILE:
  2509. os_printf(" output format: AoT file\n");
  2510. break;
  2511. case AOT_OBJECT_FILE:
  2512. os_printf(" output format: native object file\n");
  2513. break;
  2514. }
  2515. LLVMSetTarget(comp_ctx->module, triple_norm);
  2516. if (!LLVMTargetHasTargetMachine(target)) {
  2517. snprintf(buf, sizeof(buf),
  2518. "no target machine for this target (%s).", triple_norm);
  2519. aot_set_last_error(buf);
  2520. goto fail;
  2521. }
        /* Report an error if the target isn't arc and has no asm backend.
           For the arc target, since it cannot currently emit an ELF image
           to a memory buffer, we emit an assembly file instead, then call
           arc-gcc to compile the asm file to an ELF file and read that ELF
           file back into a memory buffer. */
  2527. if (strncmp(comp_ctx->target_arch, "arc", 3)
  2528. && !LLVMTargetHasAsmBackend(target)) {
  2529. snprintf(buf, sizeof(buf), "no asm backend for this target (%s).",
  2530. LLVMGetTargetName(target));
  2531. aot_set_last_error(buf);
  2532. goto fail;
  2533. }
  2534. /* Set code model */
  2535. if (size_level == 0)
  2536. code_model = LLVMCodeModelLarge;
  2537. else if (size_level == 1)
  2538. code_model = LLVMCodeModelMedium;
  2539. else if (size_level == 2)
  2540. code_model = LLVMCodeModelKernel;
  2541. else
  2542. code_model = LLVMCodeModelSmall;
  2543. /* Create the target machine */
  2544. if (!(comp_ctx->target_machine = LLVMCreateTargetMachineWithOpts(
  2545. target, triple_norm, cpu, features, opt_level,
  2546. LLVMRelocStatic, code_model, false,
  2547. comp_ctx->stack_usage_file))) {
  2548. aot_set_last_error("create LLVM target machine failed.");
  2549. goto fail;
  2550. }
        /* If the target machine was created only to query information
           (the "help" pseudo values), stop early */
  2553. if ((arch && !strcmp(arch, "help")) || (abi && !strcmp(abi, "help"))
  2554. || (cpu && !strcmp(cpu, "help"))
  2555. || (features && !strcmp(features, "+help"))) {
  2556. LOG_DEBUG(
  2557. "create LLVM target machine only for printing help info.");
  2558. goto fail;
  2559. }
  2560. }
  2561. triple = LLVMGetTargetMachineTriple(comp_ctx->target_machine);
  2562. if (!triple) {
  2563. aot_set_last_error("get target machine triple failed.");
  2564. goto fail;
  2565. }
  2566. if (strstr(triple, "linux") && !strcmp(comp_ctx->target_arch, "x86_64")) {
  2567. if (option->segue_flags) {
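            /* segue_flags is a bitmask: bits 0-4 enable the segue
               (GS-segment, address space 256) fast path for i32/i64/f32/
               f64/v128 loads, and bits 8-12 do the same for the
               corresponding stores. */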
  2568. if (option->segue_flags & (1 << 0))
  2569. comp_ctx->enable_segue_i32_load = true;
  2570. if (option->segue_flags & (1 << 1))
  2571. comp_ctx->enable_segue_i64_load = true;
  2572. if (option->segue_flags & (1 << 2))
  2573. comp_ctx->enable_segue_f32_load = true;
  2574. if (option->segue_flags & (1 << 3))
  2575. comp_ctx->enable_segue_f64_load = true;
  2576. if (option->segue_flags & (1 << 4))
  2577. comp_ctx->enable_segue_v128_load = true;
  2578. if (option->segue_flags & (1 << 8))
  2579. comp_ctx->enable_segue_i32_store = true;
  2580. if (option->segue_flags & (1 << 9))
  2581. comp_ctx->enable_segue_i64_store = true;
  2582. if (option->segue_flags & (1 << 10))
  2583. comp_ctx->enable_segue_f32_store = true;
  2584. if (option->segue_flags & (1 << 11))
  2585. comp_ctx->enable_segue_f64_store = true;
  2586. if (option->segue_flags & (1 << 12))
  2587. comp_ctx->enable_segue_v128_store = true;
  2588. }
  2589. }
  2590. LLVMDisposeMessage(triple);
  2591. if (option->enable_simd && strcmp(comp_ctx->target_arch, "x86_64") != 0
  2592. && strncmp(comp_ctx->target_arch, "aarch64", 7) != 0) {
  2593. /* Disable simd if it isn't supported by target arch */
  2594. option->enable_simd = false;
  2595. }
  2596. if (option->enable_simd) {
  2597. char *tmp;
  2598. bool check_simd_ret;
  2599. comp_ctx->enable_simd = true;
  2600. if (!(tmp = LLVMGetTargetMachineCPU(comp_ctx->target_machine))) {
            aot_set_last_error("get CPU from Target Machine failed");
  2602. goto fail;
  2603. }
  2604. check_simd_ret =
  2605. aot_check_simd_compatibility(comp_ctx->target_arch, tmp);
  2606. LLVMDisposeMessage(tmp);
  2607. if (!check_simd_ret) {
  2608. aot_set_last_error("SIMD compatibility check failed, "
  2609. "try adding --cpu=<cpu> to specify a cpu "
  2610. "or adding --disable-simd to disable SIMD");
  2611. goto fail;
  2612. }
  2613. }
  2614. if (!(target_data_ref =
  2615. LLVMCreateTargetDataLayout(comp_ctx->target_machine))) {
  2616. aot_set_last_error("create LLVM target data layout failed.");
  2617. goto fail;
  2618. }
  2619. LLVMSetModuleDataLayout(comp_ctx->module, target_data_ref);
  2620. comp_ctx->pointer_size = LLVMPointerSize(target_data_ref);
  2621. LLVMDisposeTargetData(target_data_ref);
  2622. comp_ctx->optimize = true;
  2623. if (option->output_format == AOT_LLVMIR_UNOPT_FILE)
  2624. comp_ctx->optimize = false;
    /* Create metadata for llvm float experimental constrained intrinsics */
    if (!(comp_ctx->fp_rounding_mode = LLVMMDStringInContext(
              comp_ctx->context, fp_round, (uint32)strlen(fp_round)))
        || !(comp_ctx->fp_exception_behavior = LLVMMDStringInContext(
                 comp_ctx->context, fp_exce, (uint32)strlen(fp_exce)))) {
        aot_set_last_error("create float llvm metadata failed.");
        goto fail;
    }

    if (!aot_set_llvm_basic_types(&comp_ctx->basic_types, comp_ctx->context)) {
        aot_set_last_error("create LLVM basic types failed.");
        goto fail;
    }

    if (!aot_create_llvm_consts(&comp_ctx->llvm_consts, comp_ctx)) {
        aot_set_last_error("create LLVM const values failed.");
        goto fail;
    }

    /* set exec_env data type to int8** */
    comp_ctx->exec_env_type = comp_ctx->basic_types.int8_pptr_type;

    /* set aot_inst data type to int8* */
    comp_ctx->aot_inst_type = INT8_PTR_TYPE;

    /* Create function context for each function */
    comp_ctx->func_ctx_count = comp_data->func_count;
    if (comp_data->func_count > 0
        && !(comp_ctx->func_ctxes =
                 aot_create_func_contexts(comp_data, comp_ctx)))
        goto fail;

    if (cpu) {
        uint32 len = (uint32)strlen(cpu) + 1;
        if (!(comp_ctx->target_cpu = wasm_runtime_malloc(len))) {
            aot_set_last_error("allocate memory failed");
            goto fail;
        }
        bh_memcpy_s(comp_ctx->target_cpu, len, cpu, len);
    }

    if (comp_ctx->disable_llvm_intrinsics)
        aot_intrinsic_fill_capability_flags(comp_ctx);

    ret = comp_ctx;

fail:
    if (triple_norm_new)
        LLVMDisposeMessage(triple_norm_new);

    if (cpu_new)
        LLVMDisposeMessage(cpu_new);

    if (!ret)
        aot_destroy_comp_context(comp_ctx);

    (void)i;
    return ret;
}

void
aot_destroy_comp_context(AOTCompContext *comp_ctx)
{
    if (!comp_ctx)
        return;

    if (comp_ctx->stack_usage_file == comp_ctx->stack_usage_temp_file) {
        (void)unlink(comp_ctx->stack_usage_temp_file);
    }

    if (comp_ctx->target_machine)
        LLVMDisposeTargetMachine(comp_ctx->target_machine);

    if (comp_ctx->builder)
        LLVMDisposeBuilder(comp_ctx->builder);

    if (comp_ctx->orc_thread_safe_context)
        LLVMOrcDisposeThreadSafeContext(comp_ctx->orc_thread_safe_context);

    /* Note: don't dispose comp_ctx->context and comp_ctx->module as
       they are disposed when disposing the thread safe context */

    /* Has to be the last one */
    if (comp_ctx->orc_jit)
        LLVMOrcDisposeLLLazyJIT(comp_ctx->orc_jit);

    if (comp_ctx->func_ctxes)
        aot_destroy_func_contexts(comp_ctx->func_ctxes,
                                  comp_ctx->func_ctx_count);

    if (bh_list_length(&comp_ctx->native_symbols) > 0) {
        AOTNativeSymbol *sym = bh_list_first_elem(&comp_ctx->native_symbols);
        while (sym) {
            AOTNativeSymbol *t = bh_list_elem_next(sym);
            bh_list_remove(&comp_ctx->native_symbols, sym);
            wasm_runtime_free(sym);
            sym = t;
        }
    }

    if (comp_ctx->target_cpu) {
        wasm_runtime_free(comp_ctx->target_cpu);
    }

    wasm_runtime_free(comp_ctx);
}

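/* Append a native symbol record with the given name and table index to
   comp_ctx->native_symbols; returns false on allocation or insertion
   failure. */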
static bool
insert_native_symbol(AOTCompContext *comp_ctx, const char *symbol, int32 idx)
{
    AOTNativeSymbol *sym = wasm_runtime_malloc(sizeof(AOTNativeSymbol));

    if (!sym) {
        aot_set_last_error("alloc native symbol failed.");
        return false;
    }

    memset(sym, 0, sizeof(AOTNativeSymbol));
    bh_assert(strlen(symbol) <= sizeof(sym->symbol));
    snprintf(sym->symbol, sizeof(sym->symbol), "%s", symbol);
    sym->index = idx;

    if (BH_LIST_ERROR == bh_list_insert(&comp_ctx->native_symbols, sym)) {
        wasm_runtime_free(sym);
        aot_set_last_error("insert native symbol to list failed.");
        return false;
    }

    return true;
}

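/* Return the table index of `symbol`, appending a new record if it is not
   registered yet. On 32-bit targets an i64/f64 constant ("i64#..."/"f64#...")
   is kept 8-byte aligned with an "__ignore" padding slot when needed and
   occupies two pointer-sized slots. */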
int32
aot_get_native_symbol_index(AOTCompContext *comp_ctx, const char *symbol)
{
    int32 idx = -1;
    AOTNativeSymbol *sym = NULL;

    sym = bh_list_first_elem(&comp_ctx->native_symbols);

    /* Lookup an existing symbol record */
    while (sym) {
        if (strcmp(sym->symbol, symbol) == 0) {
            idx = sym->index;
            break;
        }
        sym = bh_list_elem_next(sym);
    }

    /* The given symbol doesn't exist in the list yet, so allocate
       a new index for it */
    if (idx < 0) {
        if (comp_ctx->pointer_size == sizeof(uint32)
            && (!strncmp(symbol, "f64#", 4) || !strncmp(symbol, "i64#", 4))) {
            idx = bh_list_length(&comp_ctx->native_symbols);
            /* Add 4 bytes padding on 32-bit target to make sure that
               the f64 const is stored on 8-byte aligned address */
            if (idx & 1) {
                if (!insert_native_symbol(comp_ctx, "__ignore", idx)) {
                    return -1;
                }
            }
        }

        idx = bh_list_length(&comp_ctx->native_symbols);
        if (!insert_native_symbol(comp_ctx, symbol, idx)) {
            return -1;
        }

        if (comp_ctx->pointer_size == sizeof(uint32)
            && (!strncmp(symbol, "f64#", 4) || !strncmp(symbol, "i64#", 4))) {
            /* f64 const occupies 2 pointer slots on 32-bit target */
            if (!insert_native_symbol(comp_ctx, "__ignore", idx + 1)) {
                return -1;
            }
        }
    }

    return idx;
}

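/* The value stack is kept as a doubly linked list: push appends at the tail
   (value_list_end) and pop removes from the tail, so it behaves as a LIFO
   stack of AOTValue nodes. The block stack below follows the same scheme. */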
void
aot_value_stack_push(AOTValueStack *stack, AOTValue *value)
{
    if (!stack->value_list_head)
        stack->value_list_head = stack->value_list_end = value;
    else {
        stack->value_list_end->next = value;
        value->prev = stack->value_list_end;
        stack->value_list_end = value;
    }
}

AOTValue *
aot_value_stack_pop(AOTValueStack *stack)
{
    AOTValue *value = stack->value_list_end;

    bh_assert(stack->value_list_end);

    if (stack->value_list_head == stack->value_list_end)
        stack->value_list_head = stack->value_list_end = NULL;
    else {
        stack->value_list_end = stack->value_list_end->prev;
        stack->value_list_end->next = NULL;
        value->prev = NULL;
    }

    return value;
}

void
aot_value_stack_destroy(AOTValueStack *stack)
{
    AOTValue *value = stack->value_list_head, *p;

    while (value) {
        p = value->next;
        wasm_runtime_free(value);
        value = p;
    }

    stack->value_list_head = NULL;
    stack->value_list_end = NULL;
}

void
aot_block_stack_push(AOTBlockStack *stack, AOTBlock *block)
{
    if (!stack->block_list_head)
        stack->block_list_head = stack->block_list_end = block;
    else {
        stack->block_list_end->next = block;
        block->prev = stack->block_list_end;
        stack->block_list_end = block;
    }
}

AOTBlock *
aot_block_stack_pop(AOTBlockStack *stack)
{
    AOTBlock *block = stack->block_list_end;

    bh_assert(stack->block_list_end);

    if (stack->block_list_head == stack->block_list_end)
        stack->block_list_head = stack->block_list_end = NULL;
    else {
        stack->block_list_end = stack->block_list_end->prev;
        stack->block_list_end->next = NULL;
        block->prev = NULL;
    }

    return block;
}

void
aot_block_stack_destroy(AOTBlockStack *stack)
{
    AOTBlock *block = stack->block_list_head, *p;

    while (block) {
        p = block->next;
        aot_value_stack_destroy(&block->value_stack);
        aot_block_destroy(block);
        block = p;
    }

    stack->block_list_head = NULL;
    stack->block_list_end = NULL;
}

void
aot_block_destroy(AOTBlock *block)
{
    aot_value_stack_destroy(&block->value_stack);
    if (block->param_types)
        wasm_runtime_free(block->param_types);
    if (block->param_phis)
        wasm_runtime_free(block->param_phis);
    if (block->else_param_phis)
        wasm_runtime_free(block->else_param_phis);
    if (block->result_types)
        wasm_runtime_free(block->result_types);
    if (block->result_phis)
        wasm_runtime_free(block->result_phis);
    wasm_runtime_free(block);
}

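/* The checked-address list records (local_idx, offset, bytes) triples:
   `find` reports a hit when an entry with the same local and offset covers at
   least `bytes` bytes, and `del` drops every entry for a given local, so
   already-checked accesses can be recognized and redundant checks avoided. */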
bool
aot_checked_addr_list_add(AOTFuncContext *func_ctx, uint32 local_idx,
                          uint32 offset, uint32 bytes)
{
    AOTCheckedAddr *node = func_ctx->checked_addr_list;

    if (!(node = wasm_runtime_malloc(sizeof(AOTCheckedAddr)))) {
        aot_set_last_error("allocate memory failed.");
        return false;
    }

    node->local_idx = local_idx;
    node->offset = offset;
    node->bytes = bytes;

    node->next = func_ctx->checked_addr_list;
    func_ctx->checked_addr_list = node;
    return true;
}

void
aot_checked_addr_list_del(AOTFuncContext *func_ctx, uint32 local_idx)
{
    AOTCheckedAddr *node = func_ctx->checked_addr_list;
    AOTCheckedAddr *node_prev = NULL, *node_next;

    while (node) {
        node_next = node->next;

        if (node->local_idx == local_idx) {
            if (!node_prev)
                func_ctx->checked_addr_list = node_next;
            else
                node_prev->next = node_next;
            wasm_runtime_free(node);
        }
        else {
            node_prev = node;
        }

        node = node_next;
    }
}

bool
aot_checked_addr_list_find(AOTFuncContext *func_ctx, uint32 local_idx,
                           uint32 offset, uint32 bytes)
{
    AOTCheckedAddr *node = func_ctx->checked_addr_list;

    while (node) {
        if (node->local_idx == local_idx && node->offset == offset
            && node->bytes >= bytes) {
            return true;
        }
        node = node->next;
    }

    return false;
}

void
aot_checked_addr_list_destroy(AOTFuncContext *func_ctx)
{
    AOTCheckedAddr *node = func_ctx->checked_addr_list, *node_next;

    while (node) {
        node_next = node->next;
        wasm_runtime_free(node);
        node = node_next;
    }

    func_ctx->checked_addr_list = NULL;
}

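/* Build a default return for the current function: zero of the first result
   type (null for reference types), or a void return when the function has no
   result. */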
bool
aot_build_zero_function_ret(const AOTCompContext *comp_ctx,
                            AOTFuncContext *func_ctx, AOTFuncType *func_type)
{
    LLVMValueRef ret = NULL;

    if (func_type->result_count) {
        switch (func_type->types[func_type->param_count]) {
            case VALUE_TYPE_I32:
                ret = LLVMBuildRet(comp_ctx->builder, I32_ZERO);
                break;
            case VALUE_TYPE_I64:
                ret = LLVMBuildRet(comp_ctx->builder, I64_ZERO);
                break;
            case VALUE_TYPE_F32:
                ret = LLVMBuildRet(comp_ctx->builder, F32_ZERO);
                break;
            case VALUE_TYPE_F64:
                ret = LLVMBuildRet(comp_ctx->builder, F64_ZERO);
                break;
            case VALUE_TYPE_V128:
                ret =
                    LLVMBuildRet(comp_ctx->builder, LLVM_CONST(i64x2_vec_zero));
                break;
            case VALUE_TYPE_FUNCREF:
            case VALUE_TYPE_EXTERNREF:
                ret = LLVMBuildRet(comp_ctx->builder, REF_NULL);
                break;
            default:
                bh_assert(0);
        }
    }
    else {
        ret = LLVMBuildRetVoid(comp_ctx->builder);
    }

    if (!ret) {
        aot_set_last_error("llvm build ret failed.");
        return false;
    }
#if WASM_ENABLE_DEBUG_AOT != 0
    /* debug_func is NULL for precheck function */
    if (func_ctx->debug_func != NULL) {
        LLVMMetadataRef return_location =
            dwarf_gen_func_ret_location(comp_ctx, func_ctx);
        LLVMInstructionSetDebugLoc(ret, return_location);
    }
#endif
    return true;
}

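/* Resolve and call an LLVM intrinsic. When LLVM intrinsics are disabled and
   the runtime advertises the capability, the call is routed through the
   native symbol table via aot_get_func_from_table(); otherwise the intrinsic
   is declared in (or reused from) the module and called directly. */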
static LLVMValueRef
__call_llvm_intrinsic(const AOTCompContext *comp_ctx,
                      const AOTFuncContext *func_ctx, const char *name,
                      LLVMTypeRef ret_type, LLVMTypeRef *param_types,
                      int param_count, LLVMValueRef *param_values)
{
    LLVMValueRef func, ret;
    LLVMTypeRef func_type;
    const char *symname;
    int32 func_idx;

    if (comp_ctx->disable_llvm_intrinsics
        && aot_intrinsic_check_capability(comp_ctx, name)) {
        if (func_ctx == NULL) {
            aot_set_last_error_v("invalid func_ctx for intrinsic: %s", name);
            return NULL;
        }

        if (!(func_type = LLVMFunctionType(ret_type, param_types,
                                           (uint32)param_count, false))) {
            aot_set_last_error("create LLVM intrinsic function type failed.");
            return NULL;
        }
        if (!(func_type = LLVMPointerType(func_type, 0))) {
            aot_set_last_error(
                "create LLVM intrinsic function pointer type failed.");
            return NULL;
        }

        if (!(symname = aot_intrinsic_get_symbol(name))) {
            aot_set_last_error_v("runtime intrinsic not implemented: %s\n",
                                 name);
            return NULL;
        }

        func_idx =
            aot_get_native_symbol_index((AOTCompContext *)comp_ctx, symname);
        if (func_idx < 0) {
            aot_set_last_error_v("get runtime intrinsic index failed: %s\n",
                                 name);
            return NULL;
        }

        if (!(func = aot_get_func_from_table(comp_ctx, func_ctx->native_symbol,
                                             func_type, func_idx))) {
            aot_set_last_error_v("get runtime intrinsic failed: %s\n", name);
            return NULL;
        }
    }
    else {
        /* Declare llvm intrinsic function if necessary */
        if (!(func = LLVMGetNamedFunction(func_ctx->module, name))) {
            if (!(func_type = LLVMFunctionType(ret_type, param_types,
                                               (uint32)param_count, false))) {
                aot_set_last_error(
                    "create LLVM intrinsic function type failed.");
                return NULL;
            }

            if (!(func = LLVMAddFunction(func_ctx->module, name, func_type))) {
                aot_set_last_error("add LLVM intrinsic function failed.");
                return NULL;
            }
        }
    }

#if LLVM_VERSION_MAJOR >= 14
    func_type =
        LLVMFunctionType(ret_type, param_types, (uint32)param_count, false);
#endif

    /* Call the LLVM intrinsic function */
    if (!(ret = LLVMBuildCall2(comp_ctx->builder, func_type, func, param_values,
                               (uint32)param_count, "call"))) {
        aot_set_last_error("llvm build intrinsic call failed.");
        return NULL;
    }

    return ret;
}

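/* Varargs front end for __call_llvm_intrinsic(): the parameter LLVM types are
   passed once in `param_types` and the matching LLVMValueRef arguments follow
   as varargs. Illustrative sketch (the operand name is hypothetical):

       LLVMTypeRef param_types[1] = { F64_TYPE };
       LLVMValueRef res =
           aot_call_llvm_intrinsic(comp_ctx, func_ctx, "llvm.sqrt.f64",
                                   F64_TYPE, param_types, 1, operand);
*/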
LLVMValueRef
aot_call_llvm_intrinsic(const AOTCompContext *comp_ctx,
                        const AOTFuncContext *func_ctx, const char *intrinsic,
                        LLVMTypeRef ret_type, LLVMTypeRef *param_types,
                        int param_count, ...)
{
    LLVMValueRef *param_values, ret;
    va_list argptr;
    uint64 total_size;
    int i = 0;

    /* Create param values */
    total_size = sizeof(LLVMValueRef) * (uint64)param_count;
    if (total_size >= UINT32_MAX
        || !(param_values = wasm_runtime_malloc((uint32)total_size))) {
        aot_set_last_error("allocate memory for param values failed.");
        return NULL;
    }

    /* Load each param value */
    va_start(argptr, param_count);
    while (i < param_count)
        param_values[i++] = va_arg(argptr, LLVMValueRef);
    va_end(argptr);

    ret = __call_llvm_intrinsic(comp_ctx, func_ctx, intrinsic, ret_type,
                                param_types, param_count, param_values);

    wasm_runtime_free(param_values);
    return ret;
}

LLVMValueRef
aot_call_llvm_intrinsic_v(const AOTCompContext *comp_ctx,
                          const AOTFuncContext *func_ctx, const char *intrinsic,
                          LLVMTypeRef ret_type, LLVMTypeRef *param_types,
                          int param_count, va_list param_value_list)
{
    LLVMValueRef *param_values, ret;
    uint64 total_size;
    int i = 0;

    /* Create param values */
    total_size = sizeof(LLVMValueRef) * (uint64)param_count;
    if (total_size >= UINT32_MAX
        || !(param_values = wasm_runtime_malloc((uint32)total_size))) {
        aot_set_last_error("allocate memory for param values failed.");
        return NULL;
    }

    /* Load each param value */
    while (i < param_count)
        param_values[i++] = va_arg(param_value_list, LLVMValueRef);

    ret = __call_llvm_intrinsic(comp_ctx, func_ctx, intrinsic, ret_type,
                                param_types, param_count, param_values);

    wasm_runtime_free(param_values);
    return ret;
}

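/* Load entry `index` from the pointer table at `base` (a GEP plus a load of
   an opaque pointer slot) and bitcast it to `func_type` so it can be called
   through the native symbol table. */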
LLVMValueRef
aot_get_func_from_table(const AOTCompContext *comp_ctx, LLVMValueRef base,
                        LLVMTypeRef func_type, int32 index)
{
    LLVMValueRef func;
    LLVMValueRef func_addr;

    if (!(func_addr = I32_CONST(index))) {
        aot_set_last_error("construct function index failed.");
        goto fail;
    }

    if (!(func_addr =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, OPQ_PTR_TYPE, base,
                                    &func_addr, 1, "func_addr"))) {
        aot_set_last_error("get function addr by index failed.");
        goto fail;
    }

    func =
        LLVMBuildLoad2(comp_ctx->builder, OPQ_PTR_TYPE, func_addr, "func_tmp");

    if (func == NULL) {
        aot_set_last_error("get function pointer failed.");
        goto fail;
    }

    if (!(func =
              LLVMBuildBitCast(comp_ctx->builder, func, func_type, "func"))) {
        aot_set_last_error("cast function failed.");
        goto fail;
    }

    return func;
fail:
    return NULL;
}

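/* Emit a load of a constant from base[index], where the index comes from
   aot_get_native_symbol_index() keyed by a name that encodes the value type
   and its raw bits in hex, e.g. "f64#3FF0000000000000" for 1.0. */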
LLVMValueRef
aot_load_const_from_table(AOTCompContext *comp_ctx, LLVMValueRef base,
                          const WASMValue *value, uint8 value_type)
{
    LLVMValueRef const_index, const_addr, const_value;
    LLVMTypeRef const_ptr_type, const_type;
    char buf[128] = { 0 };
    int32 index;

    switch (value_type) {
        case VALUE_TYPE_I32:
            /* Store the raw int bits of i32 const as a hex string */
            snprintf(buf, sizeof(buf), "i32#%08" PRIX32, value->i32);
            const_ptr_type = INT32_PTR_TYPE;
            const_type = I32_TYPE;
            break;
        case VALUE_TYPE_I64:
            /* Store the raw int bits of i64 const as a hex string */
            snprintf(buf, sizeof(buf), "i64#%016" PRIX64, value->i64);
            const_ptr_type = INT64_PTR_TYPE;
            const_type = I64_TYPE;
            break;
        case VALUE_TYPE_F32:
            /* Store the raw int bits of f32 const as a hex string */
            snprintf(buf, sizeof(buf), "f32#%08" PRIX32, value->i32);
            const_ptr_type = F32_PTR_TYPE;
            const_type = F32_TYPE;
            break;
        case VALUE_TYPE_F64:
            /* Store the raw int bits of f64 const as a hex string */
            snprintf(buf, sizeof(buf), "f64#%016" PRIX64, value->i64);
            const_ptr_type = F64_PTR_TYPE;
            const_type = F64_TYPE;
            break;
        default:
            bh_assert(0);
            return NULL;
    }

    /* Load the const from exec_env->native_symbol[index] */
    index = aot_get_native_symbol_index(comp_ctx, buf);
    if (index < 0) {
        return NULL;
    }

    if (!(const_index = I32_CONST(index))) {
        aot_set_last_error("construct const index failed.");
        return NULL;
    }

    if (!(const_addr =
              LLVMBuildInBoundsGEP2(comp_ctx->builder, OPQ_PTR_TYPE, base,
                                    &const_index, 1, "const_addr_tmp"))) {
        aot_set_last_error("get const addr by index failed.");
        return NULL;
    }

    if (!(const_addr = LLVMBuildBitCast(comp_ctx->builder, const_addr,
                                        const_ptr_type, "const_addr"))) {
        aot_set_last_error("cast const failed.");
        return NULL;
    }

    if (!(const_value = LLVMBuildLoad2(comp_ctx->builder, const_type,
                                       const_addr, "const_value"))) {
        aot_set_last_error("load const failed.");
        return NULL;
    }

    (void)const_type;
    return const_value;
}

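/* Attach "branch_weights" metadata to a conditional branch so the optimizer
   treats the two successors as weighted, e.g. marking the fast path hot and
   an error path cold. */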
bool
aot_set_cond_br_weights(AOTCompContext *comp_ctx, LLVMValueRef cond_br,
                        int32 weights_true, int32 weights_false)
{
    LLVMMetadataRef md_nodes[3], meta_data;
    LLVMValueRef meta_data_as_value;

    md_nodes[0] = LLVMMDStringInContext2(comp_ctx->context, "branch_weights",
                                         strlen("branch_weights"));
    md_nodes[1] = LLVMValueAsMetadata(I32_CONST(weights_true));
    md_nodes[2] = LLVMValueAsMetadata(I32_CONST(weights_false));

    meta_data = LLVMMDNodeInContext2(comp_ctx->context, md_nodes, 3);
    meta_data_as_value = LLVMMetadataAsValue(comp_ctx->context, meta_data);

    LLVMSetMetadata(cond_br, 2, meta_data_as_value);

    return true;
}