| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
277227822792280228122822283 |
- /*
- * Copyright (C) 2019 Intel Corporation. All rights reserved.
- * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
- */
- #include "aot_emit_memory.h"
- #include "aot_compiler.h"
- #include "aot_emit_exception.h"
- #include "../aot/aot_runtime.h"
- #include "aot_intrinsic.h"
- #include "aot_emit_control.h"
/* The helper macros below are used inside emit functions of this file.
 * They all expect a local `comp_ctx` (and, for ADD_BASIC_BLOCK, a local
 * `func_ctx`) in scope, and on any LLVM builder failure they record an
 * error message and jump to the enclosing `fail:` label. */

/* Emit an "is not null" test on `value`, storing the i1 result in `res`. */
#define BUILD_IS_NOT_NULL(value, res, name)                                 \
    do {                                                                    \
        if (!(res = LLVMBuildIsNotNull(comp_ctx->builder, value, name))) {  \
            aot_set_last_error("llvm build is not null failed.");           \
            goto fail;                                                      \
        }                                                                   \
    } while (0)

/* Emit an unconditional branch to `llvm_block`. */
#define BUILD_BR(llvm_block)                                        \
    do {                                                            \
        if (!LLVMBuildBr(comp_ctx->builder, llvm_block)) {          \
            aot_set_last_error("llvm build br failed.");            \
            goto fail;                                              \
        }                                                           \
    } while (0)

/* Emit a conditional branch on `value_if` to `block_then`/`block_else`. */
#define BUILD_COND_BR(value_if, block_then, block_else)                  \
    do {                                                                 \
        if (!LLVMBuildCondBr(comp_ctx->builder, value_if, block_then,    \
                             block_else)) {                              \
            aot_set_last_error("llvm build cond br failed.");            \
            goto fail;                                                   \
        }                                                                \
    } while (0)

/* Truncate `value` to `data_type`, updating `value` in place. */
#define BUILD_TRUNC(value, data_type)                                     \
    do {                                                                  \
        if (!(value = LLVMBuildTrunc(comp_ctx->builder, value, data_type, \
                                     "val_trunc"))) {                     \
            aot_set_last_error("llvm build trunc failed.");               \
            goto fail;                                                    \
        }                                                                 \
    } while (0)

/* Emit an integer compare `left <op> right`, storing the i1 in `res`. */
#define BUILD_ICMP(op, left, right, res, name)                             \
    do {                                                                   \
        if (!(res =                                                       \
                  LLVMBuildICmp(comp_ctx->builder, op, left, right, name))) { \
            aot_set_last_error("llvm build icmp failed.");                 \
            goto fail;                                                     \
        }                                                                  \
    } while (0)

/* Emit a binary operation via LLVMBuild<Op> (e.g. Add, Sub, And). */
#define BUILD_OP(Op, left, right, res, name)                                \
    do {                                                                    \
        if (!(res = LLVMBuild##Op(comp_ctx->builder, left, right, name))) { \
            aot_set_last_error("llvm build " #Op " fail.");                 \
            goto fail;                                                      \
        }                                                                   \
    } while (0)

/* Append a new basic block named `name` to the current function. */
#define ADD_BASIC_BLOCK(block, name)                                        \
    do {                                                                    \
        if (!(block = LLVMAppendBasicBlockInContext(comp_ctx->context,      \
                                                    func_ctx->func, name))) { \
            aot_set_last_error("llvm add basic block failed.");             \
            goto fail;                                                      \
        }                                                                   \
    } while (0)

/* Move the builder's insertion point to the end of `block`. */
#define SET_BUILD_POS(block) LLVMPositionBuilderAtEnd(comp_ctx->builder, block)
- static bool
- zero_extend_u64(AOTCompContext *comp_ctx, LLVMValueRef *value, const char *name)
- {
- if (comp_ctx->pointer_size == sizeof(uint64)) {
- /* zero extend to uint64 if the target is 64-bit */
- *value = LLVMBuildZExt(comp_ctx->builder, *value, I64_TYPE, name);
- if (!*value) {
- aot_set_last_error("llvm build zero extend failed.");
- return false;
- }
- }
- return true;
- }
- static LLVMValueRef
- get_memory_check_bound(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- uint32 bytes)
- {
- LLVMValueRef mem_check_bound = NULL;
- switch (bytes) {
- case 1:
- mem_check_bound = func_ctx->mem_info[0].mem_bound_check_1byte;
- break;
- case 2:
- mem_check_bound = func_ctx->mem_info[0].mem_bound_check_2bytes;
- break;
- case 4:
- mem_check_bound = func_ctx->mem_info[0].mem_bound_check_4bytes;
- break;
- case 8:
- mem_check_bound = func_ctx->mem_info[0].mem_bound_check_8bytes;
- break;
- case 16:
- mem_check_bound = func_ctx->mem_info[0].mem_bound_check_16bytes;
- break;
- default:
- bh_assert(0);
- return NULL;
- }
- if (func_ctx->mem_space_unchanged)
- return mem_check_bound;
- if (!(mem_check_bound = LLVMBuildLoad2(
- comp_ctx->builder,
- (comp_ctx->pointer_size == sizeof(uint64)) ? I64_TYPE : I32_TYPE,
- mem_check_bound, "mem_check_bound"))) {
- aot_set_last_error("llvm build load failed.");
- return NULL;
- }
- return mem_check_bound;
- }
- #if defined(_WIN32) || defined(_WIN32_)
/* Fallback for Windows, which lacks POSIX ffs(): return the 1-based index
 * of the least significant set bit of n, or 0 when n is 0.
 * Works on the unsigned representation of n: right-shifting a negative int
 * is implementation-defined and `n & 1` on negatives relies on the
 * representation, whereas unsigned shifts/masks are fully defined. The
 * result is identical for all inputs on two's-complement platforms. */
static inline int
ffs(int n)
{
    unsigned int u = (unsigned int)n;
    int pos = 1;

    if (u == 0)
        return 0;
    while (!(u & 1u)) {
        pos++;
        u >>= 1;
    }
    return pos;
}
- #endif
- static LLVMValueRef
- get_memory_curr_page_count(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx);
- #if WASM_ENABLE_SHARED_HEAP != 0
- uint32
- get_module_inst_extra_offset(AOTCompContext *comp_ctx);
/* Load a value of `data_type` from the address `ptr` into `res`.
 * Like the other BUILD_* helpers, requires a local `comp_ctx` and a
 * `fail:` label in the expanding function. */
#define BUILD_LOAD_PTR(ptr, data_type, res)                            \
    do {                                                               \
        if (!(res = LLVMBuildLoad2(comp_ctx->builder, data_type, ptr,  \
                                   "load_value"))) {                   \
            aot_set_last_error("llvm build load failed");              \
            goto fail;                                                 \
        }                                                              \
    } while (0)
/* Update last used shared heap info(alloc ptr) in function ctx:
 * 1. shared_heap_start_off 2. shared_heap_end_off 3. shared_heap_base_addr_adj
 *
 * Emits a call to wasm_runtime_check_and_update_last_used_shared_heap() and,
 * if it reports the access is out of bounds (returns 0), emits the OOB
 * exception; otherwise control continues at `check_succ`.
 * Returns false if any LLVM emission step fails. */
bool
aot_check_shared_heap_chain_and_update(AOTCompContext *comp_ctx,
                                       AOTFuncContext *func_ctx,
                                       LLVMBasicBlockRef check_succ,
                                       LLVMValueRef start_offset,
                                       LLVMValueRef bytes, bool is_memory64)
{
    LLVMValueRef param_values[7], ret_value, func, value, cmp;
    LLVMTypeRef param_types[7], ret_type, func_type, func_ptr_type;

    /* Runtime helper signature:
     * (inst, start_offset, bytes, &start_off, &end_off, &base_addr_adj,
     *  is_memory64) -> i8 (0 means out-of-bounds) */
    param_types[0] = INT8_PTR_TYPE;
    param_types[1] = INTPTR_T_TYPE;
    param_types[2] = SIZE_T_TYPE;
    param_types[3] = INTPTR_T_PTR_TYPE;
    param_types[4] = INTPTR_T_PTR_TYPE;
    param_types[5] = INT8_PTR_TYPE;
    param_types[6] = INT8_TYPE;
    ret_type = INT8_TYPE;

    GET_AOT_FUNCTION(wasm_runtime_check_and_update_last_used_shared_heap, 7);

    /* Call function */
    param_values[0] = func_ctx->aot_inst;
    param_values[1] = start_offset;
    param_values[2] = bytes;
    /* pass alloc ptr: the runtime updates these cached fields in place */
    param_values[3] = func_ctx->shared_heap_start_off;
    param_values[4] = func_ctx->shared_heap_end_off;
    param_values[5] = func_ctx->shared_heap_base_addr_adj;
    param_values[6] = is_memory64 ? I8_ONE : I8_ZERO;
    if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
                                     param_values, 7, "call"))) {
        aot_set_last_error("llvm build call failed.");
        goto fail;
    }

    /* ret_value == 0 => access not covered by the shared heap chain: OOB */
    BUILD_ICMP(LLVMIntEQ, ret_value, I8_ZERO, cmp, "shared_heap_oob");
    if (!aot_emit_exception(comp_ctx, func_ctx,
                            EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp,
                            check_succ)) {
        goto fail;
    }

    return true;
fail:
    return false;
}
- /*
- * Setup the basic blocks for shared heap and shared chain memory checks.
- *
- * Arguments:
- * block_curr: The current basic block.
- * app_addr_in_cache_shared_heap: Output, block for cache shared heap.
- * app_addr_in_linear_mem: Output, block for linear memory.
- * app_addr_in_shared_heap_chain: Output, block for shared heap chain
- * (only for shared heap chain).
- * check_shared_heap_chain: Output, block for checking shared heap chain
- * (only for shared heap chain).
- *
- * Topology:
- * If enable_shared_heap:
- * block_curr -> app_addr_in_cache_shared_heap
- * -> app_addr_in_linear_mem
- * If enable_shared_chain:
- * block_curr -> app_addr_in_shared_heap_chain
- * -> app_addr_in_cache_shared_heap
- * -> check_shared_heap_chain
- * -> app_addr_in_linear_mem
- */
- static bool
- setup_shared_heap_blocks(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- LLVMBasicBlockRef block_curr,
- LLVMBasicBlockRef *app_addr_in_cache_shared_heap,
- LLVMBasicBlockRef *app_addr_in_linear_mem,
- LLVMBasicBlockRef *app_addr_in_shared_heap_chain,
- LLVMBasicBlockRef *check_shared_heap_chain)
- {
- ADD_BASIC_BLOCK(*app_addr_in_cache_shared_heap,
- "app_addr_in_cache_shared_heap");
- ADD_BASIC_BLOCK(*app_addr_in_linear_mem, "app_addr_in_linear_mem");
- if (comp_ctx->enable_shared_heap) {
- LLVMMoveBasicBlockAfter(*app_addr_in_cache_shared_heap, block_curr);
- LLVMMoveBasicBlockAfter(*app_addr_in_linear_mem,
- *app_addr_in_cache_shared_heap);
- }
- else if (comp_ctx->enable_shared_chain) {
- ADD_BASIC_BLOCK(*app_addr_in_shared_heap_chain,
- "app_addr_in_shared_heap_chain");
- ADD_BASIC_BLOCK(*check_shared_heap_chain, "check_shared_heap_chain");
- LLVMMoveBasicBlockAfter(*app_addr_in_shared_heap_chain, block_curr);
- LLVMMoveBasicBlockAfter(*app_addr_in_cache_shared_heap,
- *app_addr_in_shared_heap_chain);
- LLVMMoveBasicBlockAfter(*check_shared_heap_chain,
- *app_addr_in_cache_shared_heap);
- LLVMMoveBasicBlockAfter(*app_addr_in_linear_mem,
- *app_addr_in_cache_shared_heap);
- }
- return true;
- fail:
- return false;
- }
- /*
- * Build a branch to check if start_offset is in the shared heap chain region.
- *
- * Arguments:
- * start_offset: The offset to check.
- * app_addr_in_shared_heap_chain: Block to branch if in shared heap chain.
- * app_addr_in_linear_mem: Block to branch if not in shared heap chain.
- */
- static bool
- build_check_app_addr_in_shared_heap_chain(
- AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- LLVMValueRef start_offset, LLVMBasicBlockRef app_addr_in_shared_heap_chain,
- LLVMBasicBlockRef app_addr_in_linear_mem)
- {
- LLVMValueRef is_in_shared_heap = NULL;
- /* Use start_offset > func_ctx->shared_heap_head_start_off to test
- * start_off falls in shared heap chain memory region. The shared heap
- * chain oob will be detected in app_addr_in_shared_heap block or
- * aot_check_shared_heap_chain_and_update function
- */
- BUILD_ICMP(LLVMIntUGT, start_offset, func_ctx->shared_heap_head_start_off,
- is_in_shared_heap, "shared_heap_lb_cmp");
- BUILD_COND_BR(is_in_shared_heap, app_addr_in_shared_heap_chain,
- app_addr_in_linear_mem);
- SET_BUILD_POS(app_addr_in_shared_heap_chain);
- return true;
- fail:
- return false;
- }
- /*
- * Build the conditional branch for cache shared heap or shared heap chain.
- *
- * Arguments:
- * cmp: The condition for being in cache shared heap.
- * app_addr_in_cache_shared_heap: Block for cache shared heap.
- * app_addr_in_linear_mem: Block for linear memory.
- * check_shared_heap_chain: Block for checking shared heap chain.
- * bytes: The access size in bytes.
- * start_offset: The offset to check.
- * is_memory64: Whether memory is 64-bit.
- */
- static bool
- build_shared_heap_conditional_branching(
- AOTCompContext *comp_ctx, AOTFuncContext *func_ctx, LLVMValueRef cmp,
- LLVMBasicBlockRef app_addr_in_cache_shared_heap,
- LLVMBasicBlockRef app_addr_in_linear_mem,
- LLVMBasicBlockRef check_shared_heap_chain, LLVMValueRef bytes,
- LLVMValueRef start_offset, bool is_memory64)
- {
- if (comp_ctx->enable_shared_heap) {
- BUILD_COND_BR(cmp, app_addr_in_cache_shared_heap,
- app_addr_in_linear_mem);
- }
- else if (comp_ctx->enable_shared_chain) {
- BUILD_COND_BR(cmp, app_addr_in_cache_shared_heap,
- check_shared_heap_chain);
- SET_BUILD_POS(check_shared_heap_chain);
- if (!aot_check_shared_heap_chain_and_update(
- comp_ctx, func_ctx, app_addr_in_cache_shared_heap, start_offset,
- bytes, is_memory64))
- goto fail;
- }
- return true;
- fail:
- return false;
- }
- /*
- * Get the native address in the cache shared heap.
- *
- * Arguments:
- * start_offset: The offset to use for address calculation.
- * maddr: Output, the native address that in the cache shared heap.
- */
- static bool
- build_get_maddr_in_cache_shared_heap(AOTCompContext *comp_ctx,
- AOTFuncContext *func_ctx,
- LLVMValueRef start_offset,
- LLVMValueRef *maddr)
- {
- LLVMValueRef shared_heap_base_addr_adj;
- /* load the local variable */
- BUILD_LOAD_PTR(func_ctx->shared_heap_base_addr_adj, INT8_PTR_TYPE,
- shared_heap_base_addr_adj);
- if (!(*maddr = LLVMBuildInBoundsGEP2(
- comp_ctx->builder, INT8_TYPE, shared_heap_base_addr_adj,
- &start_offset, 1, "maddr_cache_shared_heap"))) {
- aot_set_last_error("llvm build inbounds gep failed");
- goto fail;
- }
- return true;
- fail:
- return false;
- }
- /*
- * Check for memory overflow in shared heap for normal memory access.
- *
- * Arguments:
- * block_curr: The current basic block.
- * block_maddr_phi: The phi block for memory address.
- * maddr_phi: The phi node for memory address.
- * start_offset: The first offset to check.
- * mem_base_addr: The base address of memory. Only used with segue.
- * bytes_u32: The access size in bytes.
- * is_memory64: Whether memory is wasm64 memory.
- * is_target_64bit: Whether target is 64-bit.
- * enable_segue: Whether to use segment register addressing.
- */
- static bool
- aot_check_shared_heap_memory_overflow(
- AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- LLVMBasicBlockRef block_curr, LLVMBasicBlockRef block_maddr_phi,
- LLVMValueRef maddr_phi, LLVMValueRef start_offset,
- LLVMValueRef mem_base_addr, uint32 bytes_u32, bool is_memory64,
- bool is_target_64bit, bool enable_segue)
- {
- LLVMBasicBlockRef app_addr_in_cache_shared_heap, app_addr_in_linear_mem;
- LLVMBasicBlockRef app_addr_in_shared_heap_chain = NULL,
- check_shared_heap_chain = NULL;
- LLVMValueRef cmp, cmp1, cmp2, shared_heap_start_off, shared_heap_end_off,
- shared_heap_check_bound, maddr = NULL;
- /* On 64/32-bit target, the offset is 64/32-bit */
- LLVMTypeRef offset_type = is_target_64bit ? I64_TYPE : I32_TYPE;
- LLVMValueRef length, bytes;
- if (!setup_shared_heap_blocks(
- comp_ctx, func_ctx, block_curr, &app_addr_in_cache_shared_heap,
- &app_addr_in_linear_mem, &app_addr_in_shared_heap_chain,
- &check_shared_heap_chain))
- goto fail;
- LLVMMoveBasicBlockAfter(block_maddr_phi, app_addr_in_linear_mem);
- /* Early branching when it's not in shared heap chain at all */
- if (comp_ctx->enable_shared_chain
- && !build_check_app_addr_in_shared_heap_chain(
- comp_ctx, func_ctx, start_offset, app_addr_in_shared_heap_chain,
- app_addr_in_linear_mem))
- goto fail;
- /* Load the local variable of the function */
- BUILD_LOAD_PTR(func_ctx->shared_heap_start_off, offset_type,
- shared_heap_start_off);
- BUILD_LOAD_PTR(func_ctx->shared_heap_end_off, offset_type,
- shared_heap_end_off);
- /* Check if the app address is in the cache shared heap range.
- * If yes, branch to the cache branch; if not, check the shared heap chain
- */
- BUILD_ICMP(LLVMIntUGE, start_offset, shared_heap_start_off, cmp,
- "cmp_cache_shared_heap_start");
- length =
- is_target_64bit ? I64_CONST(bytes_u32 - 1) : I32_CONST(bytes_u32 - 1);
- CHECK_LLVM_CONST(length);
- BUILD_OP(Sub, shared_heap_end_off, length, shared_heap_check_bound,
- "cache_shared_heap_end_bound");
- BUILD_ICMP(LLVMIntULE, start_offset, shared_heap_check_bound, cmp1,
- "cmp_cache_shared_heap_end");
- BUILD_OP(And, cmp, cmp1, cmp2, "is_in_cache_shared_heap");
- /* Conditional branching based on whether in cached shared heap */
- bytes = is_target_64bit ? I64_CONST(bytes_u32) : I32_CONST(bytes_u32);
- if (!build_shared_heap_conditional_branching(
- comp_ctx, func_ctx, cmp2, app_addr_in_cache_shared_heap,
- app_addr_in_linear_mem, check_shared_heap_chain, bytes,
- start_offset, is_memory64))
- goto fail;
- SET_BUILD_POS(app_addr_in_cache_shared_heap);
- if (!build_get_maddr_in_cache_shared_heap(comp_ctx, func_ctx, start_offset,
- &maddr))
- goto fail;
- if (enable_segue) {
- LLVMValueRef mem_base_addr_u64, maddr_u64, offset_to_mem_base;
- if (!(maddr_u64 = LLVMBuildPtrToInt(comp_ctx->builder, maddr, I64_TYPE,
- "maddr_u64"))
- || !(mem_base_addr_u64 =
- LLVMBuildPtrToInt(comp_ctx->builder, mem_base_addr,
- I64_TYPE, "mem_base_addr_u64"))) {
- aot_set_last_error("llvm build ptr to int failed");
- goto fail;
- }
- if (!(offset_to_mem_base =
- LLVMBuildSub(comp_ctx->builder, maddr_u64, mem_base_addr_u64,
- "offset_to_mem_base"))) {
- aot_set_last_error("llvm build sub failed");
- goto fail;
- }
- if (!(maddr = LLVMBuildIntToPtr(comp_ctx->builder, offset_to_mem_base,
- INT8_PTR_TYPE_GS,
- "maddr_shared_heap_segue"))) {
- aot_set_last_error("llvm build int to ptr failed.");
- goto fail;
- }
- }
- LLVMAddIncoming(maddr_phi, &maddr, &app_addr_in_cache_shared_heap, 1);
- BUILD_BR(block_maddr_phi);
- SET_BUILD_POS(app_addr_in_linear_mem);
- return true;
- fail:
- return false;
- }
- /*
- * Check for memory overflow in shared heap for bulk memory access.
- *
- * Arguments:
- * block_curr: The current basic block.
- * block_maddr_phi: The phi block for memory address.
- * check_succ: The block to branch to on success.
- * maddr_phi: The phi node for memory address.
- * start_offset: The offset to check.
- * max_addr: The maximum address to check.
- * bytes: The access size in bytes (LLVMValueRef).
- * is_memory64: Whether memory is wasm64 memory.
- * is_target_64bit: Whether target is 64-bit.
- */
- static bool
- aot_check_bulk_memory_shared_heap_memory_overflow(
- AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- LLVMBasicBlockRef block_curr, LLVMBasicBlockRef block_maddr_phi,
- LLVMBasicBlockRef check_succ, LLVMValueRef maddr_phi,
- LLVMValueRef start_offset, LLVMValueRef max_addr, LLVMValueRef bytes,
- bool is_memory64, bool is_target_64bit)
- {
- LLVMBasicBlockRef app_addr_in_cache_shared_heap, app_addr_in_linear_mem;
- LLVMBasicBlockRef app_addr_in_shared_heap_chain = NULL,
- check_shared_heap_chain = NULL;
- LLVMValueRef cmp, cmp1, cmp2, shared_heap_start_off, shared_heap_end_off,
- maddr = NULL, max_offset;
- /* On 64/32-bit target, the offset is 64/32-bit */
- LLVMTypeRef offset_type = is_target_64bit ? I64_TYPE : I32_TYPE;
- if (!setup_shared_heap_blocks(
- comp_ctx, func_ctx, block_curr, &app_addr_in_cache_shared_heap,
- &app_addr_in_linear_mem, &app_addr_in_shared_heap_chain,
- &check_shared_heap_chain))
- goto fail;
- LLVMMoveBasicBlockAfter(block_maddr_phi, check_succ);
- /* Early branching when it's not in shared heap chain at all */
- if (comp_ctx->enable_shared_chain
- && !build_check_app_addr_in_shared_heap_chain(
- comp_ctx, func_ctx, start_offset, app_addr_in_shared_heap_chain,
- app_addr_in_linear_mem))
- goto fail;
- /* Load the local variable of the function */
- BUILD_LOAD_PTR(func_ctx->shared_heap_start_off, offset_type,
- shared_heap_start_off);
- BUILD_LOAD_PTR(func_ctx->shared_heap_end_off, offset_type,
- shared_heap_end_off);
- /* Check if the app address is in the cache shared heap range.
- * If yes, branch to the cache branch; if not, check the shared heap chain
- */
- BUILD_ICMP(LLVMIntUGE, start_offset, shared_heap_start_off, cmp,
- "cmp_cache_shared_heap_start");
- BUILD_OP(Add, max_addr, is_target_64bit ? I64_NEG_ONE : I32_NEG_ONE,
- max_offset, "max_offset");
- BUILD_ICMP(LLVMIntULE, max_offset, shared_heap_end_off, cmp1,
- "cmp_cache_shared_heap_end");
- BUILD_OP(And, cmp, cmp1, cmp2, "is_in_cache_shared_heap");
- /* Conditional branching based on whether in cached shared heap */
- if (!build_shared_heap_conditional_branching(
- comp_ctx, func_ctx, cmp2, app_addr_in_cache_shared_heap,
- app_addr_in_linear_mem, check_shared_heap_chain, bytes,
- start_offset, is_memory64))
- goto fail;
- SET_BUILD_POS(app_addr_in_cache_shared_heap);
- if (!build_get_maddr_in_cache_shared_heap(comp_ctx, func_ctx, start_offset,
- &maddr))
- goto fail;
- LLVMAddIncoming(maddr_phi, &maddr, &app_addr_in_cache_shared_heap, 1);
- BUILD_BR(block_maddr_phi);
- SET_BUILD_POS(app_addr_in_linear_mem);
- return true;
- fail:
- return false;
- }
- #endif
- LLVMValueRef
- aot_check_memory_overflow(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- mem_offset_t offset, uint32 bytes, bool enable_segue,
- unsigned int *alignp)
- {
- LLVMValueRef offset_const =
- MEMORY64_COND_VALUE(I64_CONST(offset), I32_CONST(offset));
- LLVMValueRef addr, maddr, offset1, cmp1, cmp;
- LLVMValueRef mem_base_addr, mem_check_bound;
- LLVMBasicBlockRef block_curr = LLVMGetInsertBlock(comp_ctx->builder);
- LLVMBasicBlockRef check_succ;
- AOTValue *aot_value_top;
- uint32 local_idx_of_aot_value = 0;
- uint64 const_value;
- bool is_target_64bit, is_local_of_aot_value = false;
- bool is_const = false;
- #if WASM_ENABLE_SHARED_MEMORY != 0
- bool is_shared_memory =
- comp_ctx->comp_data->memories[0].flags & SHARED_MEMORY_FLAG;
- #endif
- #if WASM_ENABLE_MEMORY64 == 0
- bool is_memory64 = false;
- #else
- bool is_memory64 = IS_MEMORY64;
- #endif
- #if WASM_ENABLE_SHARED_HEAP != 0
- LLVMValueRef maddr_phi = NULL;
- LLVMBasicBlockRef block_maddr_phi = NULL;
- #endif
- is_target_64bit = (comp_ctx->pointer_size == sizeof(uint64)) ? true : false;
- if (comp_ctx->is_indirect_mode
- && aot_intrinsic_check_capability(
- comp_ctx, MEMORY64_COND_VALUE("i64.const", "i32.const"))) {
- WASMValue wasm_value;
- #if WASM_ENABLE_MEMORY64 != 0
- if (IS_MEMORY64) {
- wasm_value.i64 = offset;
- }
- else
- #endif
- {
- wasm_value.i32 = (int32)offset;
- }
- offset_const = aot_load_const_from_table(
- comp_ctx, func_ctx->native_symbol, &wasm_value,
- MEMORY64_COND_VALUE(VALUE_TYPE_I64, VALUE_TYPE_I32));
- if (!offset_const) {
- return NULL;
- }
- }
- else {
- CHECK_LLVM_CONST(offset_const);
- }
- /* Get memory base address and memory data size */
- if (func_ctx->mem_space_unchanged
- #if WASM_ENABLE_SHARED_MEMORY != 0
- || is_shared_memory
- #endif
- ) {
- mem_base_addr = func_ctx->mem_info[0].mem_base_addr;
- }
- else {
- if (!(mem_base_addr = LLVMBuildLoad2(
- comp_ctx->builder, OPQ_PTR_TYPE,
- func_ctx->mem_info[0].mem_base_addr, "mem_base"))) {
- aot_set_last_error("llvm build load failed.");
- goto fail;
- }
- }
- aot_value_top =
- func_ctx->block_stack.block_list_end->value_stack.value_list_end;
- if (aot_value_top) {
- /* aot_value_top is freed in the following POP_I32(addr),
- so save its fields here for further use */
- is_local_of_aot_value = aot_value_top->is_local;
- is_const = aot_value_top->is_const;
- local_idx_of_aot_value = aot_value_top->local_idx;
- const_value = aot_value_top->const_value;
- }
- POP_MEM_OFFSET(addr);
- /*
- * Note: not throw the integer-overflow-exception here since it must
- * have been thrown when converting float to integer before
- */
- /* return address directly if constant offset and inside memory space */
- if (LLVMIsEfficientConstInt(addr) || is_const) {
- uint64 value;
- if (LLVMIsEfficientConstInt(addr)) {
- value = (uint64)LLVMConstIntGetZExtValue(addr);
- }
- else {
- value = const_value;
- }
- uint64 mem_offset = value + (uint64)offset;
- uint32 num_bytes_per_page =
- comp_ctx->comp_data->memories[0].num_bytes_per_page;
- uint32 init_page_count =
- comp_ctx->comp_data->memories[0].init_page_count;
- uint64 mem_data_size = (uint64)num_bytes_per_page * init_page_count;
- if (alignp != NULL) {
- /*
- * A note about max_align below:
- * the assumption here is the base address of a linear memory
- * has the natural alignment. for platforms using mmap, it can
- * be even larger. for now, use a conservative value.
- */
- const unsigned int max_align = 8;
- int shift = ffs((int)(unsigned int)mem_offset);
- if (shift == 0) {
- *alignp = max_align;
- }
- else {
- unsigned int align = 1 << (shift - 1);
- if (align > max_align) {
- align = max_align;
- }
- *alignp = align;
- }
- }
- if (mem_offset + bytes <= mem_data_size) {
- /* inside memory space */
- if (comp_ctx->pointer_size == sizeof(uint64))
- offset1 = I64_CONST(mem_offset);
- else
- offset1 = I32_CONST((uint32)mem_offset);
- CHECK_LLVM_CONST(offset1);
- if (!enable_segue) {
- if (!(maddr = LLVMBuildInBoundsGEP2(comp_ctx->builder,
- INT8_TYPE, mem_base_addr,
- &offset1, 1, "maddr"))) {
- aot_set_last_error("llvm build add failed.");
- goto fail;
- }
- }
- else {
- if (!(maddr = LLVMBuildIntToPtr(comp_ctx->builder, offset1,
- INT8_PTR_TYPE_GS, "maddr"))) {
- aot_set_last_error("llvm build IntToPtr failed.");
- goto fail;
- }
- }
- return maddr;
- }
- }
- else if (alignp != NULL) {
- *alignp = 1;
- }
- /* The overflow check needs to be done under following conditions:
- * 1. In 64-bit target, offset and addr will be extended to 64-bit
- * 1.1 offset + addr can overflow when it's memory64
- * 1.2 no overflow when it's memory32
- * 2. In 32-bit target, offset and addr will be 32-bit
- * 2.1 offset + addr can overflow when it's memory32
- */
- if (is_target_64bit) {
- if (!(offset_const = LLVMBuildZExt(comp_ctx->builder, offset_const,
- I64_TYPE, "offset_i64"))
- || !(addr = LLVMBuildZExt(comp_ctx->builder, addr, I64_TYPE,
- "addr_i64"))) {
- aot_set_last_error("llvm build zero extend failed.");
- goto fail;
- }
- }
- /* offset1 = offset + addr; */
- BUILD_OP(Add, offset_const, addr, offset1, "offset1");
- /* 1.1 offset + addr can overflow when it's memory64
- * 2.1 Or when it's on 32-bit platform */
- if (is_memory64 || !is_target_64bit) {
- /* Check whether integer overflow occurs in offset + addr */
- LLVMBasicBlockRef check_integer_overflow_end;
- ADD_BASIC_BLOCK(check_integer_overflow_end,
- "check_integer_overflow_end");
- LLVMMoveBasicBlockAfter(check_integer_overflow_end, block_curr);
- BUILD_ICMP(LLVMIntULT, offset1, offset_const, cmp1, "cmp1");
- if (!aot_emit_exception(comp_ctx, func_ctx,
- EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp1,
- check_integer_overflow_end)) {
- goto fail;
- }
- SET_BUILD_POS(check_integer_overflow_end);
- block_curr = check_integer_overflow_end;
- }
- #if WASM_ENABLE_SHARED_HEAP != 0
- if (comp_ctx->enable_shared_heap
- || comp_ctx->enable_shared_chain /* TODO: && mem_idx == 0 */) {
- ADD_BASIC_BLOCK(block_maddr_phi, "maddr_phi");
- SET_BUILD_POS(block_maddr_phi);
- if (!(maddr_phi =
- LLVMBuildPhi(comp_ctx->builder,
- enable_segue ? INT8_PTR_TYPE_GS : INT8_PTR_TYPE,
- "maddr_phi"))) {
- aot_set_last_error("llvm build phi failed");
- goto fail;
- }
- SET_BUILD_POS(block_curr);
- if (!aot_check_shared_heap_memory_overflow(
- comp_ctx, func_ctx, block_curr, block_maddr_phi, maddr_phi,
- offset1, mem_base_addr, bytes, is_memory64, is_target_64bit,
- enable_segue)) {
- goto fail;
- }
- }
- #endif
- if (comp_ctx->enable_bound_check
- && !(is_local_of_aot_value
- && aot_checked_addr_list_find(func_ctx, local_idx_of_aot_value,
- offset, bytes))) {
- uint32 init_page_count =
- comp_ctx->comp_data->memories[0].init_page_count;
- if (init_page_count == 0) {
- LLVMValueRef mem_size;
- if (!(mem_size = get_memory_curr_page_count(comp_ctx, func_ctx))) {
- goto fail;
- }
- BUILD_ICMP(LLVMIntEQ, mem_size,
- MEMORY64_COND_VALUE(I64_ZERO, I32_ZERO), cmp, "is_zero");
- ADD_BASIC_BLOCK(check_succ, "check_mem_size_succ");
- LLVMMoveBasicBlockAfter(check_succ, block_curr);
- if (!aot_emit_exception(comp_ctx, func_ctx,
- EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp,
- check_succ)) {
- goto fail;
- }
- SET_BUILD_POS(check_succ);
- block_curr = check_succ;
- }
- if (!(mem_check_bound =
- get_memory_check_bound(comp_ctx, func_ctx, bytes))) {
- goto fail;
- }
- BUILD_ICMP(LLVMIntUGT, offset1, mem_check_bound, cmp, "cmp");
- /* Add basic blocks */
- ADD_BASIC_BLOCK(check_succ, "check_succ");
- LLVMMoveBasicBlockAfter(check_succ, block_curr);
- if (!aot_emit_exception(comp_ctx, func_ctx,
- EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp,
- check_succ)) {
- goto fail;
- }
- SET_BUILD_POS(check_succ);
- if (is_local_of_aot_value) {
- if (!aot_checked_addr_list_add(func_ctx, local_idx_of_aot_value,
- offset, bytes))
- goto fail;
- }
- }
- if (!enable_segue) {
- /* maddr = mem_base_addr + offset1 */
- if (!(maddr =
- LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
- mem_base_addr, &offset1, 1, "maddr"))) {
- aot_set_last_error("llvm build add failed.");
- goto fail;
- }
- }
- else {
- LLVMValueRef maddr_base;
- if (!(maddr_base = LLVMBuildIntToPtr(comp_ctx->builder, addr,
- INT8_PTR_TYPE_GS, "maddr_base"))) {
- aot_set_last_error("llvm build int to ptr failed.");
- goto fail;
- }
- if (!(maddr = LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
- maddr_base, &offset_const, 1,
- "maddr"))) {
- aot_set_last_error("llvm build inboundgep failed.");
- goto fail;
- }
- }
- #if WASM_ENABLE_SHARED_HEAP != 0
- if (comp_ctx->enable_shared_heap
- || comp_ctx->enable_shared_chain /* TODO: && mem_idx == 0 */) {
- block_curr = LLVMGetInsertBlock(comp_ctx->builder);
- LLVMAddIncoming(maddr_phi, &maddr, &block_curr, 1);
- if (!LLVMBuildBr(comp_ctx->builder, block_maddr_phi)) {
- aot_set_last_error("llvm build br failed");
- goto fail;
- }
- SET_BUILD_POS(block_maddr_phi);
- return maddr_phi;
- }
- else
- #endif
- return maddr;
- fail:
- return NULL;
- }
/* Cast `maddr` (a byte pointer into linear memory) to `ptr_type`, the
 * pointer type matching the access width, storing the result back into
 * `maddr`. On builder failure, sets the last error and jumps to the
 * enclosing function's `fail` label. */
#define BUILD_PTR_CAST(ptr_type)                                           \
    do {                                                                   \
        if (!(maddr = LLVMBuildBitCast(comp_ctx->builder, maddr, ptr_type, \
                                       "data_ptr"))) {                     \
            aot_set_last_error("llvm build bit cast failed.");             \
            goto fail;                                                     \
        }                                                                  \
    } while (0)
/* Emit a load of `data_type` from `maddr` into `value`, tagging the
 * instruction with the statically-proven alignment `known_align` (from
 * aot_check_memory_overflow) so LLVM may emit aligned accesses.
 * Jumps to the enclosing `fail` label on builder failure. */
#define BUILD_LOAD(data_type)                                             \
    do {                                                                  \
        if (!(value = LLVMBuildLoad2(comp_ctx->builder, data_type, maddr, \
                                     "data"))) {                          \
            aot_set_last_error("llvm build load failed.");                \
            goto fail;                                                    \
        }                                                                 \
        LLVMSetAlignment(value, known_align);                             \
    } while (0)
/* Emit a store of `value` to `maddr`, tagged with the statically-proven
 * alignment `known_align`. Jumps to the enclosing `fail` label on
 * builder failure. */
#define BUILD_STORE()                                                     \
    do {                                                                  \
        LLVMValueRef res;                                                 \
        if (!(res = LLVMBuildStore(comp_ctx->builder, value, maddr))) {   \
            aot_set_last_error("llvm build store failed.");               \
            goto fail;                                                    \
        }                                                                 \
        LLVMSetAlignment(res, known_align);                               \
    } while (0)
/* Sign-extend `value` to `dst_type` in place (for iNN.loadM_s ops).
 * Jumps to the enclosing `fail` label on builder failure. */
#define BUILD_SIGN_EXT(dst_type)                                        \
    do {                                                                \
        if (!(value = LLVMBuildSExt(comp_ctx->builder, value, dst_type, \
                                    "data_s_ext"))) {                   \
            aot_set_last_error("llvm build sign ext failed.");          \
            goto fail;                                                  \
        }                                                               \
    } while (0)
/* Zero-extend `value` to `dst_type` in place (for iNN.loadM_u ops and
 * atomic sub-word results). Jumps to the enclosing `fail` label on
 * builder failure. */
#define BUILD_ZERO_EXT(dst_type)                                        \
    do {                                                                \
        if (!(value = LLVMBuildZExt(comp_ctx->builder, value, dst_type, \
                                    "data_z_ext"))) {                   \
            aot_set_last_error("llvm build zero ext failed.");          \
            goto fail;                                                  \
        }                                                               \
    } while (0)
- #if WASM_ENABLE_SHARED_MEMORY != 0 || WASM_ENABLE_STRINGREF != 0
/* Emit a runtime alignment check for an atomic access: traps with
 * EXCE_UNALIGNED_ATOMIC when the low `align` bits of the native address
 * `addr` are non-zero (i.e. `addr` is not (1 << align)-byte aligned).
 * On success, leaves the builder positioned in the "check passed" block.
 * NOTE: the address is truncated to 32 bits for the check; only the low
 * bits matter for an alignment mask, so this is safe on 64-bit targets.
 * Returns true on success, false with the last error set on failure. */
bool
check_memory_alignment(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                       LLVMValueRef addr, uint32 align)
{
    LLVMBasicBlockRef block_curr = LLVMGetInsertBlock(comp_ctx->builder);
    LLVMBasicBlockRef check_align_succ;
    /* Mask with the low `align` bits set, e.g. align=2 -> 0x3 */
    LLVMValueRef align_mask = I32_CONST(((uint32)1 << align) - 1);
    LLVMValueRef res;

    CHECK_LLVM_CONST(align_mask);

    /* Convert pointer to int */
    if (!(addr = LLVMBuildPtrToInt(comp_ctx->builder, addr, I32_TYPE,
                                   "address"))) {
        aot_set_last_error("llvm build ptr to int failed.");
        goto fail;
    }

    /* The memory address should be aligned */
    BUILD_OP(And, addr, align_mask, res, "and");
    BUILD_ICMP(LLVMIntNE, res, I32_ZERO, res, "cmp");

    /* Add basic blocks */
    ADD_BASIC_BLOCK(check_align_succ, "check_align_succ");
    LLVMMoveBasicBlockAfter(check_align_succ, block_curr);

    /* Branch to the exception block when misaligned, else continue */
    if (!aot_emit_exception(comp_ctx, func_ctx, EXCE_UNALIGNED_ATOMIC, true,
                            res, check_align_succ)) {
        goto fail;
    }

    SET_BUILD_POS(check_align_succ);
    return true;
fail:
    return false;
}
- #endif /* WASM_ENABLE_SHARED_MEMORY != 0 || WASM_ENABLE_STRINGREF != 0 */
- #if WASM_ENABLE_SHARED_MEMORY != 0
/* Emit a sequentially-consistent atomic load of `data_type` from `maddr`
 * into `value`: first emits a runtime alignment trap check, then a
 * volatile load with natural alignment (1 << align) and seq_cst ordering.
 * Jumps to the enclosing `fail` label on failure. */
#define BUILD_ATOMIC_LOAD(align, data_type)                               \
    do {                                                                  \
        if (!(check_memory_alignment(comp_ctx, func_ctx, maddr, align))) { \
            goto fail;                                                    \
        }                                                                 \
        if (!(value = LLVMBuildLoad2(comp_ctx->builder, data_type, maddr, \
                                     "data"))) {                          \
            aot_set_last_error("llvm build load failed.");                \
            goto fail;                                                    \
        }                                                                 \
        LLVMSetAlignment(value, 1 << align);                              \
        LLVMSetVolatile(value, true);                                     \
        LLVMSetOrdering(value, LLVMAtomicOrderingSequentiallyConsistent); \
    } while (0)
/* Emit a sequentially-consistent atomic store of `value` to `maddr`:
 * first emits a runtime alignment trap check, then a volatile store with
 * natural alignment (1 << align) and seq_cst ordering.
 * Jumps to the enclosing `fail` label on failure. */
#define BUILD_ATOMIC_STORE(align)                                          \
    do {                                                                   \
        LLVMValueRef res;                                                  \
        if (!(check_memory_alignment(comp_ctx, func_ctx, maddr, align))) { \
            goto fail;                                                     \
        }                                                                  \
        if (!(res = LLVMBuildStore(comp_ctx->builder, value, maddr))) {    \
            aot_set_last_error("llvm build store failed.");                \
            goto fail;                                                     \
        }                                                                  \
        LLVMSetAlignment(res, 1 << align);                                 \
        LLVMSetVolatile(res, true);                                        \
        LLVMSetOrdering(res, LLVMAtomicOrderingSequentiallyConsistent);    \
    } while (0)
- #endif
/* Compile i32.load / i32.load8_s/u / i32.load16_s/u (and the atomic
 * variants when shared memory is enabled).
 *
 * Emits the bounds check via aot_check_memory_overflow(), casts the
 * checked address to the pointer type of the access width (segue/%gs
 * variants when enabled), loads the value, extends sub-word loads to
 * i32 (sign or zero per `sign`), and pushes the result on the value
 * stack.
 *
 * align/offset/bytes: the wasm memarg (bytes is the access width).
 * atomic: emit a seq_cst volatile load instead of a plain load.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_i32_load(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                        uint32 align, mem_offset_t offset, uint32 bytes,
                        bool sign, bool atomic)
{
    LLVMValueRef maddr, value = NULL;
    LLVMTypeRef data_type;
    bool enable_segue = comp_ctx->enable_segue_i32_load;
    /* Alignment proven by the bounds-check path; attached to the load. */
    unsigned int known_align;

    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, &known_align)))
        return false;

    switch (bytes) {
        case 4:
            if (!enable_segue)
                BUILD_PTR_CAST(INT32_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
#if WASM_ENABLE_SHARED_MEMORY != 0
            if (atomic)
                BUILD_ATOMIC_LOAD(align, I32_TYPE);
            else
#endif
                BUILD_LOAD(I32_TYPE);
            break;
        case 2:
        case 1:
            if (bytes == 2) {
                if (!enable_segue)
                    BUILD_PTR_CAST(INT16_PTR_TYPE);
                else
                    BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
                data_type = INT16_TYPE;
            }
            else {
                if (!enable_segue)
                    BUILD_PTR_CAST(INT8_PTR_TYPE);
                else
                    BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
                data_type = INT8_TYPE;
            }
#if WASM_ENABLE_SHARED_MEMORY != 0
            if (atomic) {
                /* Atomic sub-word loads are always zero-extended */
                BUILD_ATOMIC_LOAD(align, data_type);
                BUILD_ZERO_EXT(I32_TYPE);
            }
            else
#endif
            {
                BUILD_LOAD(data_type);
                if (sign)
                    BUILD_SIGN_EXT(I32_TYPE);
                else
                    BUILD_ZERO_EXT(I32_TYPE);
            }
            break;
        default:
            bh_assert(0);
            break;
    }

    PUSH_I32(value);
    (void)data_type;
    return true;
fail:
    return false;
}
/* Compile i64.load / i64.load8_s/u / i64.load16_s/u / i64.load32_s/u
 * (and the atomic variants when shared memory is enabled).
 *
 * Same structure as aot_compile_op_i32_load: bounds-check the address,
 * cast to the access-width pointer type (segue/%gs variants when
 * enabled), load, extend sub-word loads to i64, and push the result.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_i64_load(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                        uint32 align, mem_offset_t offset, uint32 bytes,
                        bool sign, bool atomic)
{
    LLVMValueRef maddr, value = NULL;
    LLVMTypeRef data_type;
    bool enable_segue = comp_ctx->enable_segue_i64_load;
    /* Alignment proven by the bounds-check path; attached to the load. */
    unsigned int known_align;

    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, &known_align)))
        return false;

    switch (bytes) {
        case 8:
            if (!enable_segue)
                BUILD_PTR_CAST(INT64_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT64_PTR_TYPE_GS);
#if WASM_ENABLE_SHARED_MEMORY != 0
            if (atomic)
                BUILD_ATOMIC_LOAD(align, I64_TYPE);
            else
#endif
                BUILD_LOAD(I64_TYPE);
            break;
        case 4:
        case 2:
        case 1:
            if (bytes == 4) {
                if (!enable_segue)
                    BUILD_PTR_CAST(INT32_PTR_TYPE);
                else
                    BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
                data_type = I32_TYPE;
            }
            else if (bytes == 2) {
                if (!enable_segue)
                    BUILD_PTR_CAST(INT16_PTR_TYPE);
                else
                    BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
                data_type = INT16_TYPE;
            }
            else {
                if (!enable_segue)
                    BUILD_PTR_CAST(INT8_PTR_TYPE);
                else
                    BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
                data_type = INT8_TYPE;
            }
#if WASM_ENABLE_SHARED_MEMORY != 0
            if (atomic) {
                /* Atomic sub-word loads are always zero-extended */
                BUILD_ATOMIC_LOAD(align, data_type);
                BUILD_ZERO_EXT(I64_TYPE);
            }
            else
#endif
            {
                BUILD_LOAD(data_type);
                if (sign)
                    BUILD_SIGN_EXT(I64_TYPE);
                else
                    BUILD_ZERO_EXT(I64_TYPE);
            }
            break;
        default:
            bh_assert(0);
            break;
    }

    PUSH_I64(value);
    (void)data_type;
    return true;
fail:
    return false;
}
/* Compile f32.load: bounds-check the 4-byte access, cast the address to
 * an f32 pointer (segue/%gs variant when enabled), load, and push the
 * result. `align` from the memarg is unused here; the load is tagged
 * with the statically-proven alignment instead.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_f32_load(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                        uint32 align, mem_offset_t offset)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_f32_load;
    unsigned int known_align;

    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, 4,
                                            enable_segue, &known_align)))
        return false;

    if (!enable_segue)
        BUILD_PTR_CAST(F32_PTR_TYPE);
    else
        BUILD_PTR_CAST(F32_PTR_TYPE_GS);
    BUILD_LOAD(F32_TYPE);

    PUSH_F32(value);
    return true;
fail:
    return false;
}
/* Compile f64.load: bounds-check the 8-byte access, cast the address to
 * an f64 pointer (segue/%gs variant when enabled), load, and push the
 * result. `align` from the memarg is unused here; the load is tagged
 * with the statically-proven alignment instead.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_f64_load(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                        uint32 align, mem_offset_t offset)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_f64_load;
    unsigned int known_align;

    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, 8,
                                            enable_segue, &known_align)))
        return false;

    if (!enable_segue)
        BUILD_PTR_CAST(F64_PTR_TYPE);
    else
        BUILD_PTR_CAST(F64_PTR_TYPE_GS);
    BUILD_LOAD(F64_TYPE);

    PUSH_F64(value);
    return true;
fail:
    return false;
}
/* Compile i32.store / i32.store8 / i32.store16 (and the atomic variants
 * when shared memory is enabled).
 *
 * Pops the i32 value, bounds-checks the address, casts it to the
 * access-width pointer type (segue/%gs variants when enabled),
 * truncates the value for sub-word stores, and emits the store.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_i32_store(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                         uint32 align, mem_offset_t offset, uint32 bytes,
                         bool atomic)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_i32_store;

    POP_I32(value);

    /* Alignment proven by the bounds-check path; attached to the store. */
    unsigned int known_align;
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, &known_align)))
        return false;

    switch (bytes) {
        case 4:
            if (!enable_segue)
                BUILD_PTR_CAST(INT32_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
            break;
        case 2:
            if (!enable_segue)
                BUILD_PTR_CAST(INT16_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT16_TYPE);
            break;
        case 1:
            if (!enable_segue)
                BUILD_PTR_CAST(INT8_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT8_TYPE);
            break;
        default:
            bh_assert(0);
            break;
    }

#if WASM_ENABLE_SHARED_MEMORY != 0
    if (atomic)
        BUILD_ATOMIC_STORE(align);
    else
#endif
        BUILD_STORE();
    return true;
fail:
    return false;
}
/* Compile i64.store / i64.store8 / i64.store16 / i64.store32 (and the
 * atomic variants when shared memory is enabled).
 *
 * Pops the i64 value, bounds-checks the address, casts it to the
 * access-width pointer type (segue/%gs variants when enabled),
 * truncates the value for sub-word stores, and emits the store.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_i64_store(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                         uint32 align, mem_offset_t offset, uint32 bytes,
                         bool atomic)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_i64_store;

    POP_I64(value);

    /* Alignment proven by the bounds-check path; attached to the store. */
    unsigned int known_align;
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, &known_align)))
        return false;

    switch (bytes) {
        case 8:
            if (!enable_segue)
                BUILD_PTR_CAST(INT64_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT64_PTR_TYPE_GS);
            break;
        case 4:
            if (!enable_segue)
                BUILD_PTR_CAST(INT32_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
            BUILD_TRUNC(value, I32_TYPE);
            break;
        case 2:
            if (!enable_segue)
                BUILD_PTR_CAST(INT16_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT16_TYPE);
            break;
        case 1:
            if (!enable_segue)
                BUILD_PTR_CAST(INT8_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT8_TYPE);
            break;
        default:
            bh_assert(0);
            break;
    }

#if WASM_ENABLE_SHARED_MEMORY != 0
    if (atomic)
        BUILD_ATOMIC_STORE(align);
    else
#endif
        BUILD_STORE();
    return true;
fail:
    return false;
}
/* Compile f32.store: pop the f32 value, bounds-check the 4-byte access,
 * cast the address to an f32 pointer (segue/%gs variant when enabled),
 * and emit the store. `align` from the memarg is unused here.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_f32_store(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                         uint32 align, mem_offset_t offset)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_f32_store;

    POP_F32(value);

    unsigned int known_align;
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, 4,
                                            enable_segue, &known_align)))
        return false;

    if (!enable_segue)
        BUILD_PTR_CAST(F32_PTR_TYPE);
    else
        BUILD_PTR_CAST(F32_PTR_TYPE_GS);
    BUILD_STORE();
    return true;
fail:
    return false;
}
/* Compile f64.store: pop the f64 value, bounds-check the 8-byte access,
 * cast the address to an f64 pointer (segue/%gs variant when enabled),
 * and emit the store. `align` from the memarg is unused here.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_f64_store(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                         uint32 align, mem_offset_t offset)
{
    LLVMValueRef maddr, value;
    bool enable_segue = comp_ctx->enable_segue_f64_store;

    POP_F64(value);

    unsigned int known_align;
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, 8,
                                            enable_segue, &known_align)))
        return false;

    if (!enable_segue)
        BUILD_PTR_CAST(F64_PTR_TYPE);
    else
        BUILD_PTR_CAST(F64_PTR_TYPE_GS);
    BUILD_STORE();
    return true;
fail:
    return false;
}
- static LLVMValueRef
- get_memory_curr_page_count(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
- {
- LLVMValueRef mem_size;
- if (func_ctx->mem_space_unchanged) {
- mem_size = func_ctx->mem_info[0].mem_cur_page_count_addr;
- }
- else {
- if (!(mem_size = LLVMBuildLoad2(
- comp_ctx->builder, I32_TYPE,
- func_ctx->mem_info[0].mem_cur_page_count_addr, "mem_size"))) {
- aot_set_last_error("llvm build load failed.");
- goto fail;
- }
- }
- return LLVMBuildIntCast(comp_ctx->builder, mem_size,
- MEMORY64_COND_VALUE(I64_TYPE, I32_TYPE), "");
- fail:
- return NULL;
- }
- bool
- aot_compile_op_memory_size(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
- {
- LLVMValueRef mem_size = get_memory_curr_page_count(comp_ctx, func_ctx);
- if (mem_size)
- PUSH_PAGE_COUNT(mem_size);
- return mem_size ? true : false;
- fail:
- return false;
- }
- bool
- aot_compile_op_memory_grow(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
- {
- LLVMValueRef mem_size = get_memory_curr_page_count(comp_ctx, func_ctx);
- LLVMValueRef delta, param_values[2], ret_value, func, value;
- LLVMTypeRef param_types[2], ret_type, func_type, func_ptr_type;
- int32 func_index;
- #if WASM_ENABLE_MEMORY64 != 0
- LLVMValueRef u32_max, u32_cmp_result;
- #endif
- if (!mem_size)
- return false;
- POP_PAGE_COUNT(delta);
- /* TODO: multi-memory aot_enlarge_memory_with_idx() */
- /* Function type of aot_enlarge_memory() */
- param_types[0] = INT8_PTR_TYPE;
- param_types[1] = I32_TYPE;
- ret_type = INT8_TYPE;
- if (!(func_type = LLVMFunctionType(ret_type, param_types, 2, false))) {
- aot_set_last_error("llvm add function type failed.");
- return false;
- }
- if (comp_ctx->is_jit_mode) {
- /* JIT mode, call the function directly */
- if (!(func_ptr_type = LLVMPointerType(func_type, 0))) {
- aot_set_last_error("llvm add pointer type failed.");
- return false;
- }
- if (!(value = I64_CONST((uint64)(uintptr_t)wasm_enlarge_memory))
- || !(func = LLVMConstIntToPtr(value, func_ptr_type))) {
- aot_set_last_error("create LLVM value failed.");
- return false;
- }
- }
- else if (comp_ctx->is_indirect_mode) {
- if (!(func_ptr_type = LLVMPointerType(func_type, 0))) {
- aot_set_last_error("create LLVM function type failed.");
- return false;
- }
- func_index =
- aot_get_native_symbol_index(comp_ctx, "aot_enlarge_memory");
- if (func_index < 0) {
- return false;
- }
- if (!(func = aot_get_func_from_table(comp_ctx, func_ctx->native_symbol,
- func_ptr_type, func_index))) {
- return false;
- }
- }
- else {
- char *func_name = "aot_enlarge_memory";
- /* AOT mode, declare the function */
- if (!(func = LLVMGetNamedFunction(func_ctx->module, func_name))
- && !(func =
- LLVMAddFunction(func_ctx->module, func_name, func_type))) {
- aot_set_last_error("llvm add function failed.");
- return false;
- }
- }
- /* Call function aot_enlarge_memory() */
- param_values[0] = func_ctx->aot_inst;
- param_values[1] = LLVMBuildTrunc(comp_ctx->builder, delta, I32_TYPE, "");
- if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
- param_values, 2, "call"))) {
- aot_set_last_error("llvm build call failed.");
- return false;
- }
- BUILD_ICMP(LLVMIntUGT, ret_value, I8_ZERO, ret_value, "mem_grow_ret");
- #if WASM_ENABLE_MEMORY64 != 0
- if (IS_MEMORY64) {
- if (!(u32_max = I64_CONST(UINT32_MAX))) {
- aot_set_last_error("llvm build const failed");
- return false;
- }
- BUILD_ICMP(LLVMIntULE, delta, u32_max, u32_cmp_result, "page_size_cmp");
- BUILD_OP(And, ret_value, u32_cmp_result, ret_value, "and");
- }
- #endif
- /* ret_value = ret_value == true ? pre_page_count : -1 */
- if (!(ret_value = LLVMBuildSelect(
- comp_ctx->builder, ret_value, mem_size,
- MEMORY64_COND_VALUE(I64_NEG_ONE, I32_NEG_ONE), "mem_grow_ret"))) {
- aot_set_last_error("llvm build select failed.");
- return false;
- }
- PUSH_PAGE_COUNT(ret_value);
- return true;
- fail:
- return false;
- }
- #if WASM_ENABLE_BULK_MEMORY_OPT != 0 || WASM_ENABLE_STRINGREF != 0
/* Bounds-check a bulk-memory access of `bytes` bytes starting at wasm
 * address `offset` and return the native address (mem_base + offset).
 *
 * Fast path: when both offset and bytes are LLVM constants and the
 * access provably fits inside the initial memory size, return the GEP
 * directly with no runtime check.
 * Slow path: compute max_addr = offset + bytes, trap on integer
 * overflow (needed for memory64 and for 32-bit targets), optionally
 * divert to the shared-heap check (result merged through a phi), then
 * trap when max_addr exceeds the linear memory data size.
 * Returns the native address value, or NULL with the last error set. */
LLVMValueRef
check_bulk_memory_overflow(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                           LLVMValueRef offset, LLVMValueRef bytes)
{
    LLVMValueRef maddr, max_addr, cmp, cmp1, offset1;
    LLVMValueRef mem_base_addr;
    LLVMBasicBlockRef block_curr = LLVMGetInsertBlock(comp_ctx->builder);
    LLVMBasicBlockRef check_succ;
    LLVMValueRef mem_size;
    bool is_target_64bit;
#if WASM_ENABLE_MEMORY64 == 0
    bool is_memory64 = false;
#else
    bool is_memory64 = IS_MEMORY64;
#endif
#if WASM_ENABLE_SHARED_HEAP != 0
    LLVMValueRef maddr_phi = NULL;
    LLVMBasicBlockRef block_maddr_phi = NULL;
#endif

    is_target_64bit = (comp_ctx->pointer_size == sizeof(uint64)) ? true : false;

    /* Get memory base address and memory data size */
#if WASM_ENABLE_SHARED_MEMORY != 0
    /* flags bit 1: memory is shared, so its base never moves */
    bool is_shared_memory = comp_ctx->comp_data->memories[0].flags & 0x02;
    if (func_ctx->mem_space_unchanged || is_shared_memory) {
#else
    if (func_ctx->mem_space_unchanged) {
#endif
        mem_base_addr = func_ctx->mem_info[0].mem_base_addr;
    }
    else {
        if (!(mem_base_addr = LLVMBuildLoad2(
                  comp_ctx->builder, OPQ_PTR_TYPE,
                  func_ctx->mem_info[0].mem_base_addr, "mem_base"))) {
            aot_set_last_error("llvm build load failed.");
            goto fail;
        }
    }

    /*
     * Note: not throw the integer-overflow-exception here since it must
     * have been thrown when converting float to integer before
     */
    /* return address directly if constant offset and inside memory space */
    if (LLVMIsEfficientConstInt(offset) && LLVMIsEfficientConstInt(bytes)) {
        uint64 mem_offset = (uint64)LLVMConstIntGetZExtValue(offset);
        uint64 mem_len = (uint64)LLVMConstIntGetZExtValue(bytes);
        uint32 num_bytes_per_page =
            comp_ctx->comp_data->memories[0].num_bytes_per_page;
        uint32 init_page_count =
            comp_ctx->comp_data->memories[0].init_page_count;
        uint64 mem_data_size = (uint64)num_bytes_per_page * init_page_count;

        if (mem_data_size > 0 && mem_offset + mem_len <= mem_data_size) {
            /* inside memory space */
            /* maddr = mem_base_addr + moffset */
            /* Perform zero extension in advance to avoid LLVMBuildInBoundsGEP2
             * interpreting a negative address due to sign extension when
             * mem_offset >= 2GiB */
            if (comp_ctx->pointer_size == sizeof(uint64)) {
                offset1 = I64_CONST(mem_offset);
            }
            else {
                offset1 = I32_CONST((uint32)mem_offset);
            }
            CHECK_LLVM_CONST(offset1);
            if (!(maddr = LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
                                                mem_base_addr, &offset1, 1,
                                                "maddr"))) {
                aot_set_last_error("llvm build add failed.");
                goto fail;
            }
            return maddr;
        }
    }

    if (func_ctx->mem_space_unchanged) {
        mem_size = func_ctx->mem_info[0].mem_data_size_addr;
    }
    else {
        if (!(mem_size = LLVMBuildLoad2(
                  comp_ctx->builder, I64_TYPE,
                  func_ctx->mem_info[0].mem_data_size_addr, "mem_size"))) {
            aot_set_last_error("llvm build load failed.");
            goto fail;
        }
    }

    ADD_BASIC_BLOCK(check_succ, "check_succ");
    LLVMMoveBasicBlockAfter(check_succ, block_curr);

    /* Same logic with aot_check_memory_overflow, offset and bytes are 32/64
     * bits on 32/64 bits platform */
    if (is_target_64bit) {
        offset =
            LLVMBuildZExt(comp_ctx->builder, offset, I64_TYPE, "extend_offset");
        bytes = LLVMBuildZExt(comp_ctx->builder, bytes, I64_TYPE, "extend_len");
        if (!offset || !bytes) {
            aot_set_last_error("llvm build zext failed.");
            goto fail;
        }
    }

    BUILD_OP(Add, offset, bytes, max_addr, "max_addr");

    /* Check overflow when it's memory64 or it's on 32 bits platform */
    if (is_memory64 || !is_target_64bit) {
        /* Check whether integer overflow occurs in offset + bytes */
        LLVMBasicBlockRef check_integer_overflow_end;
        ADD_BASIC_BLOCK(check_integer_overflow_end,
                        "check_integer_overflow_end");
        LLVMMoveBasicBlockAfter(check_integer_overflow_end, block_curr);

        /* offset + bytes can overflow yet is valid(for example, 0xffffffff, 1),
         * allow it to be 0(either 0, 0 or overflow and valid) */
        BUILD_ICMP(LLVMIntULT, max_addr, offset, cmp, "cmp");
        BUILD_ICMP(LLVMIntNE, max_addr, is_target_64bit ? I64_ZERO : I32_ZERO,
                   cmp1, "cmp1");
        BUILD_OP(And, cmp, cmp1, cmp, "overflow");
        if (!aot_emit_exception(comp_ctx, func_ctx,
                                EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp,
                                check_integer_overflow_end)) {
            goto fail;
        }
        SET_BUILD_POS(check_integer_overflow_end);
        block_curr = check_integer_overflow_end;
    }

#if WASM_ENABLE_SHARED_HEAP != 0
    if (comp_ctx->enable_shared_heap
        || comp_ctx->enable_shared_chain /* TODO: && mem_idx == 0 */) {
        /* Addresses from either linear memory or the shared heap are
         * merged through this phi node. */
        ADD_BASIC_BLOCK(block_maddr_phi, "maddr_phi");
        SET_BUILD_POS(block_maddr_phi);
        if (!(maddr_phi = LLVMBuildPhi(comp_ctx->builder, INT8_PTR_TYPE,
                                       "maddr_phi"))) {
            aot_set_last_error("llvm build phi failed");
            goto fail;
        }
        SET_BUILD_POS(block_curr);
        if (!aot_check_bulk_memory_shared_heap_memory_overflow(
                comp_ctx, func_ctx, block_curr, block_maddr_phi, check_succ,
                maddr_phi, offset, max_addr, bytes, is_memory64,
                is_target_64bit)) {
            goto fail;
        }
    }
#endif

    /* mem_size is always 64-bit, extend max_addr on 32 bits platform */
    if (!is_target_64bit
        && !(max_addr = LLVMBuildZExt(comp_ctx->builder, max_addr, I64_TYPE,
                                      "extend_max_addr"))) {
        aot_set_last_error("llvm build zext failed.");
        goto fail;
    }
    BUILD_ICMP(LLVMIntUGT, max_addr, mem_size, cmp, "cmp_max_mem_addr");

    if (!aot_emit_exception(comp_ctx, func_ctx,
                            EXCE_OUT_OF_BOUNDS_MEMORY_ACCESS, true, cmp,
                            check_succ)) {
        goto fail;
    }

    /* maddr = mem_base_addr + offset */
    if (!(maddr = LLVMBuildInBoundsGEP2(comp_ctx->builder, INT8_TYPE,
                                        mem_base_addr, &offset, 1, "maddr"))) {
        aot_set_last_error("llvm build add failed.");
        goto fail;
    }

#if WASM_ENABLE_SHARED_HEAP != 0
    if (comp_ctx->enable_shared_heap
        || comp_ctx->enable_shared_chain /* TODO: && mem_idx == 0 */) {
        block_curr = LLVMGetInsertBlock(comp_ctx->builder);
        LLVMAddIncoming(maddr_phi, &maddr, &block_curr, 1);
        if (!LLVMBuildBr(comp_ctx->builder, block_maddr_phi)) {
            aot_set_last_error("llvm build br failed");
            goto fail;
        }
        LLVMPositionBuilderAtEnd(comp_ctx->builder, block_maddr_phi);
        return maddr_phi;
    }
    else
#endif
        return maddr;
fail:
    return NULL;
}
- #endif /* end of WASM_ENABLE_BULK_MEMORY != 0 || WASM_ENABLE_STRINGREF != 0 */
- #if WASM_ENABLE_BULK_MEMORY != 0
- bool
- aot_compile_op_memory_init(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- uint32 seg_index)
- {
- LLVMValueRef seg, offset, dst, len, param_values[5], ret_value, func, value;
- LLVMTypeRef param_types[5], ret_type, func_type, func_ptr_type;
- AOTFuncType *aot_func_type = func_ctx->aot_func->func_type;
- LLVMBasicBlockRef block_curr = LLVMGetInsertBlock(comp_ctx->builder);
- LLVMBasicBlockRef mem_init_fail, init_success;
- seg = I32_CONST(seg_index);
- POP_I32(len);
- POP_I32(offset);
- POP_MEM_OFFSET(dst);
- if (!zero_extend_u64(comp_ctx, &dst, "dst64")) {
- return false;
- }
- param_types[0] = INT8_PTR_TYPE;
- param_types[1] = I32_TYPE;
- param_types[2] = I32_TYPE;
- param_types[3] = I32_TYPE;
- param_types[4] = SIZE_T_TYPE;
- ret_type = INT8_TYPE;
- if (comp_ctx->is_jit_mode)
- GET_AOT_FUNCTION(llvm_jit_memory_init, 5);
- else
- GET_AOT_FUNCTION(aot_memory_init, 5);
- /* Call function aot_memory_init() */
- param_values[0] = func_ctx->aot_inst;
- param_values[1] = seg;
- param_values[2] = offset;
- param_values[3] = len;
- param_values[4] = dst;
- if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
- param_values, 5, "call"))) {
- aot_set_last_error("llvm build call failed.");
- return false;
- }
- BUILD_ICMP(LLVMIntUGT, ret_value, I8_ZERO, ret_value, "mem_init_ret");
- ADD_BASIC_BLOCK(mem_init_fail, "mem_init_fail");
- ADD_BASIC_BLOCK(init_success, "init_success");
- LLVMMoveBasicBlockAfter(mem_init_fail, block_curr);
- LLVMMoveBasicBlockAfter(init_success, block_curr);
- if (!LLVMBuildCondBr(comp_ctx->builder, ret_value, init_success,
- mem_init_fail)) {
- aot_set_last_error("llvm build cond br failed.");
- goto fail;
- }
- /* If memory.init failed, return this function
- so the runtime can catch the exception */
- LLVMPositionBuilderAtEnd(comp_ctx->builder, mem_init_fail);
- if (!aot_build_zero_function_ret(comp_ctx, func_ctx, aot_func_type)) {
- goto fail;
- }
- LLVMPositionBuilderAtEnd(comp_ctx->builder, init_success);
- return true;
- fail:
- return false;
- }
/* Compile data.drop: call the runtime helper (llvm_jit_data_drop in JIT
 * mode, aot_data_drop otherwise) to release the passive data segment
 * `seg_index`. The helper's return value is intentionally ignored; the
 * op itself cannot trap.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_data_drop(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                         uint32 seg_index)
{
    LLVMValueRef seg, param_values[2], ret_value, func, value;
    LLVMTypeRef param_types[2], ret_type, func_type, func_ptr_type;

    seg = I32_CONST(seg_index);
    CHECK_LLVM_CONST(seg);

    param_types[0] = INT8_PTR_TYPE;
    param_types[1] = I32_TYPE;
    ret_type = INT8_TYPE;

    if (comp_ctx->is_jit_mode)
        GET_AOT_FUNCTION(llvm_jit_data_drop, 2);
    else
        GET_AOT_FUNCTION(aot_data_drop, 2);

    /* Call function aot_data_drop() */
    param_values[0] = func_ctx->aot_inst;
    param_values[1] = seg;
    if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
                                     param_values, 2, "call"))) {
        aot_set_last_error("llvm build call failed.");
        return false;
    }
    return true;
fail:
    return false;
}
- #endif /* end of WASM_ENABLE_BULK_MEMORY */
- #if WASM_ENABLE_BULK_MEMORY_OPT != 0
/* Compile memory.copy: bounds-check both the source and destination
 * ranges, then copy `len` bytes with overlap-safe semantics.
 *
 * In JIT/indirect mode the copy goes through an aot_memmove/memmove
 * call (the LLVM memmove intrinsic can't be relocated there); otherwise
 * LLVM's own memmove intrinsic is emitted.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_memory_copy(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
{
    LLVMValueRef src, dst, src_addr, dst_addr, len, res;
    bool call_aot_memmove = false;

    POP_MEM_OFFSET(len);
    POP_MEM_OFFSET(src);
    POP_MEM_OFFSET(dst);

    /* Both ranges must be fully in bounds before any byte is copied */
    if (!(src_addr = check_bulk_memory_overflow(comp_ctx, func_ctx, src, len)))
        return false;

    if (!(dst_addr = check_bulk_memory_overflow(comp_ctx, func_ctx, dst, len)))
        return false;

    /* Length is passed to memmove as a 64-bit size */
    if (!zero_extend_u64(comp_ctx, &len, "len64")) {
        return false;
    }

    call_aot_memmove = comp_ctx->is_indirect_mode || comp_ctx->is_jit_mode;
    if (call_aot_memmove) {
        LLVMTypeRef param_types[3], ret_type, func_type, func_ptr_type;
        LLVMValueRef func, params[3];

        param_types[0] = INT8_PTR_TYPE;
        param_types[1] = INT8_PTR_TYPE;
        param_types[2] = SIZE_T_TYPE;
        ret_type = INT8_PTR_TYPE;

        if (!(func_type = LLVMFunctionType(ret_type, param_types, 3, false))) {
            aot_set_last_error("create LLVM function type failed.");
            return false;
        }

        if (!(func_ptr_type = LLVMPointerType(func_type, 0))) {
            aot_set_last_error("create LLVM function pointer type failed.");
            return false;
        }

        if (comp_ctx->is_jit_mode) {
            /* JIT mode: embed the host aot_memmove address directly */
            if (!(func = I64_CONST((uint64)(uintptr_t)aot_memmove))
                || !(func = LLVMConstIntToPtr(func, func_ptr_type))) {
                aot_set_last_error("create LLVM value failed.");
                return false;
            }
        }
        else {
            /* Indirect mode: resolve memmove via the native symbol table */
            int32 func_index;
            func_index = aot_get_native_symbol_index(comp_ctx, "memmove");
            if (func_index < 0) {
                return false;
            }
            if (!(func =
                      aot_get_func_from_table(comp_ctx, func_ctx->native_symbol,
                                              func_ptr_type, func_index))) {
                return false;
            }
        }

        params[0] = dst_addr;
        params[1] = src_addr;
        params[2] = len;
        if (!(res = LLVMBuildCall2(comp_ctx->builder, func_type, func, params,
                                   3, "call_memmove"))) {
            aot_set_last_error("llvm build memmove failed.");
            return false;
        }
    }
    else {
        if (!(res = LLVMBuildMemMove(comp_ctx->builder, dst_addr, 1, src_addr,
                                     1, len))) {
            aot_set_last_error("llvm build memmove failed.");
            return false;
        }
    }
    return true;
fail:
    return false;
}
/* Thin wrapper over memset(); gives JIT-generated code a plain function
 * symbol to call (its address is embedded as a constant), since the
 * libc memset may be an intrinsic/macro. Returns `dest`. */
static void *
jit_memset(void *dest, int byte_value, size_t count)
{
    void *ret = memset(dest, byte_value, count);
    return ret;
}
/* Compile memory.fill: bounds-check the destination range, then fill
 * `len` bytes with the popped byte value via a memset call.
 *
 * The memset target is resolved per mode: host jit_memset address in
 * JIT mode, native-symbol table in indirect mode, or a declared
 * external memset in AOT mode.
 * Returns true on success, false with the last error set. */
bool
aot_compile_op_memory_fill(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
{
    LLVMValueRef val, dst, dst_addr, len, res;
    LLVMTypeRef param_types[3], ret_type, func_type, func_ptr_type;
    LLVMValueRef func, params[3];

    POP_MEM_OFFSET(len);
    POP_I32(val);
    POP_MEM_OFFSET(dst);

    if (!(dst_addr = check_bulk_memory_overflow(comp_ctx, func_ctx, dst, len)))
        return false;

    /* Length is passed to memset as a 64-bit size */
    if (!zero_extend_u64(comp_ctx, &len, "len64")) {
        return false;
    }

    param_types[0] = INT8_PTR_TYPE;
    param_types[1] = I32_TYPE;
    param_types[2] = SIZE_T_TYPE;
    ret_type = INT8_PTR_TYPE;

    if (!(func_type = LLVMFunctionType(ret_type, param_types, 3, false))) {
        aot_set_last_error("create LLVM function type failed.");
        return false;
    }

    if (!(func_ptr_type = LLVMPointerType(func_type, 0))) {
        aot_set_last_error("create LLVM function pointer type failed.");
        return false;
    }

    if (comp_ctx->is_jit_mode) {
        /* JIT mode: embed the host jit_memset address directly */
        if (!(func = I64_CONST((uint64)(uintptr_t)jit_memset))
            || !(func = LLVMConstIntToPtr(func, func_ptr_type))) {
            aot_set_last_error("create LLVM value failed.");
            return false;
        }
    }
    else if (comp_ctx->is_indirect_mode) {
        /* Indirect mode: resolve memset via the native symbol table */
        int32 func_index;
        func_index = aot_get_native_symbol_index(comp_ctx, "memset");
        if (func_index < 0) {
            return false;
        }
        if (!(func = aot_get_func_from_table(comp_ctx, func_ctx->native_symbol,
                                             func_ptr_type, func_index))) {
            return false;
        }
    }
    else {
        /* AOT mode: declare memset and let the linker resolve it */
        if (!(func = LLVMGetNamedFunction(func_ctx->module, "memset"))
            && !(func =
                     LLVMAddFunction(func_ctx->module, "memset", func_type))) {
            aot_set_last_error("llvm add function failed.");
            return false;
        }
    }

    params[0] = dst_addr;
    params[1] = val;
    params[2] = len;
    if (!(res = LLVMBuildCall2(comp_ctx->builder, func_type, func, params, 3,
                               "call_memset"))) {
        aot_set_last_error("llvm build memset failed.");
        return false;
    }
    return true;
fail:
    return false;
}
- #endif /* end of WASM_ENABLE_BULK_MEMORY_OPT */
- #if WASM_ENABLE_SHARED_MEMORY != 0
/* Compile an atomic read-modify-write opcode (i32/i64.atomic.rmw.*).
 *
 * Pops the operand value from the value stack, bounds-checks and
 * alignment-checks the target address, emits a sequentially-consistent
 * (and volatile) LLVM `atomicrmw`, then pushes the previous memory
 * value, zero-extended back to the operand type.
 *
 * @param atomic_op  the LLVMAtomicRMWBinOp to perform (add/sub/and/...)
 * @param op_type    VALUE_TYPE_I32 or VALUE_TYPE_I64 (operand type)
 * @param align      log2 alignment encoded in the wasm opcode
 * @param offset     static memory offset from the opcode immediate
 * @param bytes      access width in bytes (1, 2, 4 or 8)
 * @return true on success, false on error
 *
 * NOTE(review): the POP_*/PUSH_*/BUILD_* macros appear to set the last
 * error and jump to `fail` on failure — confirm against their
 * definitions elsewhere in this file.
 */
bool
aot_compile_op_atomic_rmw(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
                          uint8 atomic_op, uint8 op_type, uint32 align,
                          mem_offset_t offset, uint32 bytes)
{
    LLVMValueRef maddr, value, result;
    /* Segue (GS-segment addressing) is only used when both the load and
       store segue options are enabled for the operand's value type. */
    bool enable_segue = (op_type == VALUE_TYPE_I32)
                            ? comp_ctx->enable_segue_i32_load
                                  && comp_ctx->enable_segue_i32_store
                            : comp_ctx->enable_segue_i64_load
                                  && comp_ctx->enable_segue_i64_store;
    if (op_type == VALUE_TYPE_I32)
        POP_I32(value);
    else
        POP_I64(value);
    /* Emit the bounds check and obtain the native address to operate on. */
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, NULL)))
        return false;
    /* Atomic accesses must be naturally aligned; traps otherwise. */
    if (!check_memory_alignment(comp_ctx, func_ctx, maddr, align))
        return false;
    /* Cast the address to a pointer of the access width, truncating the
       operand to match for sub-width accesses. */
    switch (bytes) {
        case 8:
            if (!enable_segue)
                BUILD_PTR_CAST(INT64_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT64_PTR_TYPE_GS);
            break;
        case 4:
            if (!enable_segue)
                BUILD_PTR_CAST(INT32_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
            /* i64 operand on a 32-bit access: keep only the low 32 bits. */
            if (op_type == VALUE_TYPE_I64)
                BUILD_TRUNC(value, I32_TYPE);
            break;
        case 2:
            if (!enable_segue)
                BUILD_PTR_CAST(INT16_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT16_TYPE);
            break;
        case 1:
            if (!enable_segue)
                BUILD_PTR_CAST(INT8_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT8_TYPE);
            break;
        default:
            bh_assert(0);
            break;
    }
    /* atomicrmw with seq_cst ordering; result is the previous value. */
    if (!(result = LLVMBuildAtomicRMW(
              comp_ctx->builder, atomic_op, maddr, value,
              LLVMAtomicOrderingSequentiallyConsistent, false))) {
        goto fail;
    }
    LLVMSetVolatile(result, true);
    /* Widen the (possibly sub-width) previous value back to the wasm
       operand type and push it. */
    if (op_type == VALUE_TYPE_I32) {
        if (!(result = LLVMBuildZExt(comp_ctx->builder, result, I32_TYPE,
                                     "result_i32"))) {
            goto fail;
        }
        PUSH_I32(result);
    }
    else {
        if (!(result = LLVMBuildZExt(comp_ctx->builder, result, I64_TYPE,
                                     "result_i64"))) {
            goto fail;
        }
        PUSH_I64(result);
    }
    return true;
fail:
    return false;
}
/* Compile an atomic compare-and-exchange opcode (i32/i64.atomic.rmw*.cmpxchg).
 *
 * Pops the replacement value and the expected value, bounds-checks and
 * alignment-checks the target address, emits a sequentially-consistent
 * (and volatile) LLVM `cmpxchg`, then pushes the previous memory value
 * zero-extended back to the operand type.
 *
 * @param op_type  VALUE_TYPE_I32 or VALUE_TYPE_I64 (operand type)
 * @param align    log2 alignment encoded in the wasm opcode
 * @param offset   static memory offset from the opcode immediate
 * @param bytes    access width in bytes (1, 2, 4 or 8)
 * @return true on success, false on error
 *
 * NOTE(review): the POP_*/PUSH_*/BUILD_* macros appear to set the last
 * error and jump to `fail` on failure — confirm against their
 * definitions elsewhere in this file.
 */
bool
aot_compile_op_atomic_cmpxchg(AOTCompContext *comp_ctx,
                              AOTFuncContext *func_ctx, uint8 op_type,
                              uint32 align, mem_offset_t offset, uint32 bytes)
{
    LLVMValueRef maddr, value, expect, result;
    /* Segue (GS-segment addressing) requires both load and store segue
       options for the operand's value type. */
    bool enable_segue = (op_type == VALUE_TYPE_I32)
                            ? comp_ctx->enable_segue_i32_load
                                  && comp_ctx->enable_segue_i32_store
                            : comp_ctx->enable_segue_i64_load
                                  && comp_ctx->enable_segue_i64_store;
    /* Stack order: the replacement value is on top, the expected value
       below it. */
    if (op_type == VALUE_TYPE_I32) {
        POP_I32(value);
        POP_I32(expect);
    }
    else {
        POP_I64(value);
        POP_I64(expect);
    }
    if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
                                            enable_segue, NULL)))
        return false;
    /* Atomic accesses must be naturally aligned; traps otherwise. */
    if (!check_memory_alignment(comp_ctx, func_ctx, maddr, align))
        return false;
    /* Cast the address to a pointer of the access width; truncate both
       operands to match for sub-width accesses. */
    switch (bytes) {
        case 8:
            if (!enable_segue)
                BUILD_PTR_CAST(INT64_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT64_PTR_TYPE_GS);
            break;
        case 4:
            if (!enable_segue)
                BUILD_PTR_CAST(INT32_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT32_PTR_TYPE_GS);
            if (op_type == VALUE_TYPE_I64) {
                BUILD_TRUNC(value, I32_TYPE);
                BUILD_TRUNC(expect, I32_TYPE);
            }
            break;
        case 2:
            if (!enable_segue)
                BUILD_PTR_CAST(INT16_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT16_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT16_TYPE);
            BUILD_TRUNC(expect, INT16_TYPE);
            break;
        case 1:
            if (!enable_segue)
                BUILD_PTR_CAST(INT8_PTR_TYPE);
            else
                BUILD_PTR_CAST(INT8_PTR_TYPE_GS);
            BUILD_TRUNC(value, INT8_TYPE);
            BUILD_TRUNC(expect, INT8_TYPE);
            break;
        default:
            bh_assert(0);
            break;
    }
    /* cmpxchg with seq_cst ordering for both success and failure. */
    if (!(result = LLVMBuildAtomicCmpXchg(
              comp_ctx->builder, maddr, expect, value,
              LLVMAtomicOrderingSequentiallyConsistent,
              LLVMAtomicOrderingSequentiallyConsistent, false))) {
        goto fail;
    }
    LLVMSetVolatile(result, true);
    /* cmpxchg returns a { <access-width int>, i1 } structure; extract
       field 0, the previous value loaded from memory. */
    if (!(result = LLVMBuildExtractValue(comp_ctx->builder, result, 0,
                                         "previous_value"))) {
        goto fail;
    }
    /* Widen sub-width results back to the wasm operand type; the type
       check skips the no-op extension for full-width accesses. */
    if (op_type == VALUE_TYPE_I32) {
        if (LLVMTypeOf(result) != I32_TYPE) {
            if (!(result = LLVMBuildZExt(comp_ctx->builder, result, I32_TYPE,
                                         "result_i32"))) {
                goto fail;
            }
        }
        PUSH_I32(result);
    }
    else {
        if (LLVMTypeOf(result) != I64_TYPE) {
            if (!(result = LLVMBuildZExt(comp_ctx->builder, result, I64_TYPE,
                                         "result_i64"))) {
                goto fail;
            }
        }
        PUSH_I64(result);
    }
    return true;
fail:
    return false;
}
- bool
- aot_compile_op_atomic_wait(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx,
- uint8 op_type, uint32 align, mem_offset_t offset,
- uint32 bytes)
- {
- LLVMValueRef maddr, value, timeout, expect, cmp;
- LLVMValueRef param_values[5], ret_value, func, is_wait64;
- LLVMTypeRef param_types[5], ret_type, func_type, func_ptr_type;
- LLVMBasicBlockRef wait_fail, wait_success;
- LLVMBasicBlockRef block_curr = LLVMGetInsertBlock(comp_ctx->builder);
- AOTFuncType *aot_func_type = func_ctx->aot_func->func_type;
- POP_I64(timeout);
- if (op_type == VALUE_TYPE_I32) {
- POP_I32(expect);
- is_wait64 = I8_CONST(false);
- if (!(expect = LLVMBuildZExt(comp_ctx->builder, expect, I64_TYPE,
- "expect_i64"))) {
- goto fail;
- }
- }
- else {
- POP_I64(expect);
- is_wait64 = I8_CONST(true);
- }
- CHECK_LLVM_CONST(is_wait64);
- if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
- false, NULL)))
- return false;
- if (!check_memory_alignment(comp_ctx, func_ctx, maddr, align))
- return false;
- param_types[0] = INT8_PTR_TYPE;
- param_types[1] = INT8_PTR_TYPE;
- param_types[2] = I64_TYPE;
- param_types[3] = I64_TYPE;
- param_types[4] = INT8_TYPE;
- ret_type = I32_TYPE;
- GET_AOT_FUNCTION(wasm_runtime_atomic_wait, 5);
- /* Call function wasm_runtime_atomic_wait() */
- param_values[0] = func_ctx->aot_inst;
- param_values[1] = maddr;
- param_values[2] = expect;
- param_values[3] = timeout;
- param_values[4] = is_wait64;
- if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
- param_values, 5, "call"))) {
- aot_set_last_error("llvm build call failed.");
- return false;
- }
- BUILD_ICMP(LLVMIntNE, ret_value, I32_NEG_ONE, cmp, "atomic_wait_ret");
- ADD_BASIC_BLOCK(wait_fail, "atomic_wait_fail");
- ADD_BASIC_BLOCK(wait_success, "wait_success");
- LLVMMoveBasicBlockAfter(wait_fail, block_curr);
- LLVMMoveBasicBlockAfter(wait_success, block_curr);
- if (!LLVMBuildCondBr(comp_ctx->builder, cmp, wait_success, wait_fail)) {
- aot_set_last_error("llvm build cond br failed.");
- goto fail;
- }
- /* If atomic wait failed, return this function
- so the runtime can catch the exception */
- LLVMPositionBuilderAtEnd(comp_ctx->builder, wait_fail);
- if (!aot_build_zero_function_ret(comp_ctx, func_ctx, aot_func_type)) {
- goto fail;
- }
- LLVMPositionBuilderAtEnd(comp_ctx->builder, wait_success);
- PUSH_I32(ret_value);
- /* Insert suspend check point */
- if (comp_ctx->enable_thread_mgr) {
- if (!check_suspend_flags(comp_ctx, func_ctx, false))
- return false;
- }
- return true;
- fail:
- return false;
- }
- bool
- aot_compiler_op_atomic_notify(AOTCompContext *comp_ctx,
- AOTFuncContext *func_ctx, uint32 align,
- mem_offset_t offset, uint32 bytes)
- {
- LLVMValueRef maddr, value, count;
- LLVMValueRef param_values[3], ret_value, func;
- LLVMTypeRef param_types[3], ret_type, func_type, func_ptr_type;
- POP_I32(count);
- if (!(maddr = aot_check_memory_overflow(comp_ctx, func_ctx, offset, bytes,
- false, NULL)))
- return false;
- if (!check_memory_alignment(comp_ctx, func_ctx, maddr, align))
- return false;
- param_types[0] = INT8_PTR_TYPE;
- param_types[1] = INT8_PTR_TYPE;
- param_types[2] = I32_TYPE;
- ret_type = I32_TYPE;
- GET_AOT_FUNCTION(wasm_runtime_atomic_notify, 3);
- /* Call function wasm_runtime_atomic_notify() */
- param_values[0] = func_ctx->aot_inst;
- param_values[1] = maddr;
- param_values[2] = count;
- if (!(ret_value = LLVMBuildCall2(comp_ctx->builder, func_type, func,
- param_values, 3, "call"))) {
- aot_set_last_error("llvm build call failed.");
- return false;
- }
- PUSH_I32(ret_value);
- return true;
- fail:
- return false;
- }
- bool
- aot_compiler_op_atomic_fence(AOTCompContext *comp_ctx, AOTFuncContext *func_ctx)
- {
- return LLVMBuildFence(comp_ctx->builder,
- LLVMAtomicOrderingSequentiallyConsistent, false, "")
- ? true
- : false;
- }
- #endif /* end of WASM_ENABLE_SHARED_MEMORY */
|