
Make wasi-nn backends separate shared libraries (#3509)

- All files under *core/iwasm/libraries/wasi-nn* are compiled into shared libraries
- *wasi-nn.c* is shared between backends
- Each backend is built as a separate shared library
- If the wasi-nn feature is enabled, iwasm depends on the shared library libiwasm.so
  instead of linking the static library libvmlib.a
liang.he 1 year ago
parent
commit
f844b33b2d
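The commit relies on WAMR's generic `--native-lib` loader: iwasm dlopen()s each backend library and resolves the hook symbols this commit adds to *wasi_nn.c* (`get_native_lib`, `init_native_lib`, `deinit_native_lib`). Below is a minimal host-side sketch of that lookup, assuming only the standard dlfcn API; the `NativeSymbol` table is reduced to `void *` for brevity, and this is not the literal `load_native_lib` from *product-mini/platforms/posix/main.c*.

```c
/* Hedged sketch of the --native-lib loading flow. Only dlopen/dlsym/dlerror
 * and the three hook names are taken from this commit. */
#include <dlfcn.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t (*get_native_lib_t)(char **module_name, void **symbols);
typedef int (*init_native_lib_t)(void);

static void *
load_wasi_nn_backend(const char *path) /* e.g. "libwasi-nn-tflite.so" */
{
    void *handle = dlopen(path, RTLD_NOW | RTLD_GLOBAL);
    if (!handle) {
        fprintf(stderr, "failed to load native library %s. %s\n", path,
                dlerror());
        return NULL;
    }

    /* one-time setup: in this commit it calls wasi_nn_initialize() */
    init_native_lib_t init_lib =
        (init_native_lib_t)dlsym(handle, "init_native_lib");
    if (init_lib && init_lib() != 0) {
        dlclose(handle);
        return NULL;
    }

    /* fetch the NativeSymbol table and the module name ("wasi_nn" or
     * "wasi_ephemeral_nn") to register the Wasm imports under */
    get_native_lib_t get_lib =
        (get_native_lib_t)dlsym(handle, "get_native_lib");
    if (get_lib) {
        char *module_name = NULL;
        void *symbols = NULL;
        uint32_t n = get_lib(&module_name, &symbols);
        printf("registering %u natives under module %s\n", n, module_name);
    }
    return handle;
}
```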

+ 0 - 3
core/iwasm/aot/aot_runtime.c

@@ -1944,9 +1944,6 @@ aot_deinstantiate(AOTModuleInstance *module_inst, bool is_sub_inst)
 #endif
 
     if (!is_sub_inst) {
-#if WASM_ENABLE_WASI_NN != 0
-        wasi_nn_destroy((WASMModuleInstanceCommon *)module_inst);
-#endif
         wasm_native_call_context_dtors((WASMModuleInstanceCommon *)module_inst);
     }
 

+ 0 - 4
core/iwasm/aot/aot_runtime.h

@@ -14,10 +14,6 @@
 #include "gc_export.h"
 #endif
 
-#if WASM_ENABLE_WASI_NN != 0
-#include "../libraries/wasi-nn/src/wasi_nn_private.h"
-#endif
-
 #ifdef __cplusplus
 extern "C" {
 #endif

+ 0 - 15
core/iwasm/common/wasm_native.c

@@ -33,9 +33,6 @@ get_spectest_export_apis(NativeSymbol **p_libc_builtin_apis);
 uint32
 get_libc_wasi_export_apis(NativeSymbol **p_libc_wasi_apis);
 
-uint32_t
-get_wasi_nn_export_apis(NativeSymbol **p_libc_wasi_apis);
-
 uint32
 get_base_lib_export_apis(NativeSymbol **p_base_lib_apis);
 
@@ -565,18 +562,6 @@ wasm_native_init()
         goto fail;
 #endif /* WASM_ENABLE_LIB_RATS */
 
-#if WASM_ENABLE_WASI_NN != 0
-    n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
-#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
-#define wasi_nn_module_name "wasi_ephemeral_nn"
-#else /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
-#define wasi_nn_module_name "wasi_nn"
-#endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
-    if (!wasm_native_register_natives(wasi_nn_module_name, native_symbols,
-                                      n_native_symbols))
-        goto fail;
-#endif
-
 #if WASM_ENABLE_QUICK_AOT_ENTRY != 0
     if (!quick_aot_entry_init()) {
 #if WASM_ENABLE_SPEC_TEST != 0 || WASM_ENABLE_LIBC_BUILTIN != 0     \

+ 0 - 3
core/iwasm/interpreter/wasm_runtime.c

@@ -3181,9 +3181,6 @@ wasm_deinstantiate(WASMModuleInstance *module_inst, bool is_sub_inst)
         wasm_runtime_free(module_inst->c_api_func_imports);
 
     if (!is_sub_inst) {
-#if WASM_ENABLE_WASI_NN != 0
-        wasi_nn_destroy((WASMModuleInstanceCommon *)module_inst);
-#endif
         wasm_native_call_context_dtors((WASMModuleInstanceCommon *)module_inst);
     }
 

+ 0 - 4
core/iwasm/interpreter/wasm_runtime.h

@@ -13,10 +13,6 @@
 #include "../common/wasm_runtime_common.h"
 #include "../common/wasm_exec_env.h"
 
-#if WASM_ENABLE_WASI_NN != 0
-#include "../libraries/wasi-nn/src/wasi_nn_private.h"
-#endif
-
 #ifdef __cplusplus
 extern "C" {
 #endif

+ 35 - 16
core/iwasm/libraries/wasi-nn/README.md

@@ -2,15 +2,28 @@
 
 ## How to use
 
+### Host
+
 Enable WASI-NN in WAMR by specifying it in the cmake build configuration as follows,
 
-```
+```cmake
 set (WAMR_BUILD_WASI_NN  1)
 ```
 
-The definition of the functions provided by WASI-NN is in the header file `core/iwasm/libraries/wasi-nn/wasi_nn.h`.
+or on the command line:
+
+```bash
+$ cmake -DWAMR_BUILD_WASI_NN=1 <other options> ...
+```
+
+> [!Caution]
+> If `WAMR_BUILD_WASI_NN` is enabled, iwasm links against a shared WAMR library instead of a static one. wasi-nn backends are loaded dynamically at runtime: pass the path of the backend library to iwasm with `--native-lib=<path of backend library>`, and make sure every shared library can be found via `LD_LIBRARY_PATH`.
+
+### Wasm
+
+The definition of functions provided by WASI-NN (Wasm imports) is in the header file _core/iwasm/libraries/wasi-nn/wasi_nn.h_.
 
-By only including this file in your WASM application you will bind WASI-NN into your module.
+Simply including this file in a WASM application binds WASI-NN into your module.
 
 ## Tests
 
@@ -27,9 +40,8 @@ Build the runtime image for your execution target type.
 - `vx-delegate`
 - `tpu`
 
-```
-EXECUTION_TYPE=cpu
-docker build -t wasi-nn-${EXECUTION_TYPE} -f core/iwasm/libraries/wasi-nn/test/Dockerfile.${EXECUTION_TYPE} .
+```bash
+EXECUTION_TYPE=cpu docker build -t wasi-nn-${EXECUTION_TYPE} -f core/iwasm/libraries/wasi-nn/test/Dockerfile.${EXECUTION_TYPE} .
 ```
 
 ### Build wasm app
@@ -50,15 +62,19 @@ If all the tests have run properly you will see the following message in the terminal
 Tests: passed!
 ```
 
+> [!TIP]
+> The examples below use _libwasi-nn-tflite.so_; substitute whichever backend library you actually built.
+
 - CPU
 
-```
+```bash
 docker run \
     -v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
     -v $PWD/core/iwasm/libraries/wasi-nn/test/models:/models \
     wasi-nn-cpu \
     --dir=/ \
     --env="TARGET=cpu" \
+    --native-lib=/lib/libwasi-nn-tflite.so \
     /assets/test_tensorflow.wasm
 ```
 
@@ -66,7 +82,7 @@ docker run \
   - Requirements:
     - [NVIDIA docker](https://github.com/NVIDIA/nvidia-docker).
 
-```
+```bash
 docker run \
     --runtime=nvidia \
     -v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
@@ -74,17 +90,19 @@ docker run \
     wasi-nn-nvidia-gpu \
     --dir=/ \
     --env="TARGET=gpu" \
+    --native-lib=/lib/libwasi-nn-tflite.so \
     /assets/test_tensorflow.wasm
 ```
 
 - vx-delegate for NPU (x86 simulator)
 
-```
+```bash
 docker run \
     -v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
     wasi-nn-vx-delegate \
     --dir=/ \
     --env="TARGET=gpu" \
+    --native-lib=/lib/libwasi-nn-tflite.so \
     /assets/test_tensorflow_quantized.wasm
 ```
 
@@ -92,7 +110,7 @@ docker run \
   - Requirements:
     - [Coral USB](https://coral.ai/products/accelerator/).
 
-```
+```bash
 docker run \
     --privileged \
     --device=/dev/bus/usb:/dev/bus/usb \
@@ -100,6 +118,7 @@ docker run \
     wasi-nn-tpu \
     --dir=/ \
     --env="TARGET=tpu" \
+    --native-lib=/lib/libwasi-nn-tflite.so \
     /assets/test_tensorflow_quantized.wasm
 ```
 
@@ -120,20 +139,20 @@ Use [classification-example](https://github.com/bytecodealliance/wasi-nn/tree/ma
 
 ### Prepare the model and the wasm
 
-``` bash
+```bash
 $ pwd
 /workspaces/wasm-micro-runtime/core/iwasm/libraries/wasi-nn/test
 
 $ docker build -t wasi-nn-example:v1.0 -f Dockerfile.wasi-nn-example .
 ```
 
-There are model files(*mobilenet\**) and wasm files(*wasi-nn-example.wasm*) in the directory */workspaces/wasi-nn/rust/examples/classification-example/build* in the image of wasi-nn-example:v1.0.
+There are model files (_mobilenet*_) and wasm files (_wasi-nn-example.wasm_) in the directory _/workspaces/wasi-nn/rust/examples/classification-example/build_ in the image wasi-nn-example:v1.0.
 
 ### build iwasm and test
 
-*TODO: May need alternative steps to build the iwasm and test in the container of wasi-nn-example:v1.0*
+_TODO: May need alternative steps to build the iwasm and test in the container of wasi-nn-example:v1.0_
 
-``` bash
+```bash
 $ pwd
 /workspaces/wasm-micro-runtime
 
@@ -143,9 +162,9 @@ $ docker run --rm -it -v $(pwd):/workspaces/wasm-micro-runtime wasi-nn-example:v
 > [!Caution]
 > The following steps are executed in the container of wasi-nn-example:v1.0.
 
-``` bash
+```bash
 $ cd /workspaces/wasm-micro-runtime/product-mini/platforms/linux
 $ cmake -S . -B build -DWAMR_BUILD_WASI_NN=1 -DWAMR_BUILD_WASI_EPHEMERAL_NN=1
 $ cmake --build build
 $ ./build/iwasm -v=5 --map-dir=/workspaces/wasi-nn/rust/examples/classification-example/build/::fixture /workspaces/wasi-nn/rust/examples/classification-example/build/wasi-nn-example.wasm
-```
+```
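To round out the README's "Wasm" subsection, here is a hypothetical guest-side program using the legacy `wasi_nn.h` interface. The signatures and struct layouts below are inferred from the function-pointer typedefs added to *wasi_nn_types.h* in this commit (minus the leading backend-context argument) and may not match the installed header exactly:

```c
/* Hypothetical wasm app; field names and signatures are assumptions
 * derived from wasi_nn_types.h, not verified against wasi_nn.h. */
#include <stdint.h>
#include "wasi_nn.h"

int
run_inference(uint8_t *model, uint32_t model_len,
              uint8_t *input, uint32_t *dims, uint32_t n_dims,
              uint8_t *output, uint32_t *output_len)
{
    /* a single graph builder wrapping the raw model bytes */
    graph_builder builder = { .buf = model, .size = model_len };
    graph_builder_array builders = { .buf = &builder, .size = 1 };

    graph g;
    if (load(&builders, tensorflowlite, cpu, &g) != success)
        return -1;

    graph_execution_context ctx;
    if (init_execution_context(g, &ctx) != success)
        return -1;

    tensor_dimensions td = { .buf = dims, .size = n_dims };
    tensor t = { .dimensions = &td, .type = fp32, .data = input };
    if (set_input(ctx, 0, &t) != success)
        return -1;

    if (compute(ctx) != success)
        return -1;

    return get_output(ctx, 0, output, output_len) == success ? 0 : -1;
}
```

Compile it with a wasi toolchain and run it as in the docker examples above, adding `--native-lib=/lib/libwasi-nn-tflite.so`.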

+ 45 - 11
core/iwasm/libraries/wasi-nn/cmake/wasi_nn.cmake

@@ -6,17 +6,51 @@ list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR})
 # Find tensorflow-lite
 find_package(tensorflow_lite REQUIRED)
 
-set(WASI_NN_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/..)
+set(WASI_NN_ROOT ${CMAKE_CURRENT_LIST_DIR}/..)
 
-include_directories (${WASI_NN_ROOT_DIR}/include)
-include_directories (${WASI_NN_ROOT_DIR}/src)
-include_directories (${WASI_NN_ROOT_DIR}/src/utils)
-
-set (
-  WASI_NN_SOURCES
-  ${WASI_NN_ROOT_DIR}/src/wasi_nn.c
-  ${WASI_NN_ROOT_DIR}/src/wasi_nn_tensorflowlite.cpp
-  ${WASI_NN_ROOT_DIR}/src/utils/wasi_nn_app_native.c
+#
+# wasi-nn general
+add_library(
+  wasi-nn-general
+  SHARED
+    ${WASI_NN_ROOT}/src/wasi_nn.c
+    ${WASI_NN_ROOT}/src/utils/wasi_nn_app_native.c
+)
+target_include_directories(
+  wasi-nn-general
+  PUBLIC
+    ${WASI_NN_ROOT}/include
+    ${WASI_NN_ROOT}/src
+    ${WASI_NN_ROOT}/src/utils
+)
+target_link_libraries(
+  wasi-nn-general
+  PUBLIC
+    libiwasm
+)
+target_compile_definitions(
+  wasi-nn-general
+  PUBLIC
+   $<$<CONFIG:Debug>:NN_LOG_LEVEL=0>
+   $<$<CONFIG:Release>:NN_LOG_LEVEL=2>
 )
 
-set (WASI_NN_LIBS tensorflow-lite)
+#
+# wasi-nn backends
+add_library(
+  wasi-nn-tflite
+  SHARED
+    ${WASI_NN_ROOT}/src/wasi_nn_tensorflowlite.cpp
+)
+#target_link_options(
+#  wasi-nn-tflite
+#  PRIVATE
+#    -Wl,--whole-archive libwasi-nn-general.a
+#    -Wl,--no-whole-archive
+#)
+target_link_libraries(
+  wasi-nn-tflite
+  PUBLIC
+    tensorflow-lite
+    wasi-nn-general
+)

+ 0 - 58
core/iwasm/libraries/wasi-nn/external/CMakeLists.txt

@@ -1,58 +0,0 @@
-# Copyright (C) 2019 Intel Corporation.  All rights reserved.
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-
-cmake_minimum_required(VERSION 3.16)
-project(wasi-nn C CXX)
-
-set(CMAKE_POSITION_INDEPENDENT_CODE ON)
-
-set(WAMR_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../../..)
-set(WASI_NN_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/..)
-
-if(NOT CMAKE_BUILD_TYPE)
-  set(CMAKE_BUILD_TYPE Debug)
-endif()
-
-#### libvmlib ####
-# NOTE: we build vmlib as a shared library here so that it can be
-# shared between iwasm and native libraries.
-include(${WASI_NN_ROOT_DIR}/cmake/iwasm_helper.cmake)
-include(${WAMR_ROOT_DIR}/build-scripts/runtime_lib.cmake)
-
-add_library(vmlib SHARED ${WAMR_RUNTIME_LIB_SOURCE})
-
-# iwasm
-include(${SHARED_DIR}/utils/uncommon/shared_uncommon.cmake)
-set(RUNTIME_SOURCE_ALL
-  ${WAMR_ROOT_DIR}/product-mini/platforms/${WAMR_BUILD_PLATFORM}/main.c
-  ${UNCOMMON_SHARED_SOURCE}
-)
-
-add_executable(iwasm ${RUNTIME_SOURCE_ALL})
-target_link_libraries(iwasm vmlib -lpthread -lm -ldl)
-
-#### TensorFlow ####
-
-include(${WASI_NN_ROOT_DIR}/cmake/wasi_nn.cmake)
-
-#### WASI-NN ####
-
-include_directories(
-  ${WAMR_ROOT_DIR}/core/iwasm/include
-  ${WAMR_ROOT_DIR}/core/shared/utils
-  ${WAMR_ROOT_DIR}/core/shared/platform/linux
-)
-
-add_library(wasi-nn SHARED
-  ${WASI_NN_SOURCES}
-)
-
-# Add `get_native_lib` symbol
-target_compile_definitions(wasi-nn PUBLIC
-  WASI_NN_SHARED
-)
-
-target_link_libraries(wasi-nn
-  ${WASI_NN_LIBS}
-  vmlib
-)

+ 0 - 13
core/iwasm/libraries/wasi-nn/external/README.md

@@ -1,13 +0,0 @@
-# wasi-nn as shared library
-
-Example on how to create libwasi-nn (external library) instead of embedding wasi-nn inside iwasm
-
-From folder `core/iwasm/libraries/wasi-nn/test`, build the test and run
-
-```sh
-../external/build/iwasm \
-    --dir=. \
-    --env="TARGET=cpu" \
-    --native-lib=../external/build/libwasi-nn.so \
-    test_tensorflow.wasm 
-```

+ 37 - 1
core/iwasm/libraries/wasi-nn/include/wasi_nn_types.h

@@ -9,6 +9,10 @@
 #include <stdint.h>
 #include <stdbool.h>
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 /**
  * ERRORS
  *
@@ -100,7 +104,8 @@ typedef enum {
     onnx,
     tensorflow,
     pytorch,
-    tensorflowlite
+    tensorflowlite,
+    backend_amount
 } graph_encoding;
 
 // Define where the graph should be executed.
@@ -109,4 +114,35 @@ typedef enum execution_target { cpu = 0, gpu, tpu } execution_target;
 // Bind a `graph` to the input and output tensors for an inference.
 typedef uint32_t graph_execution_context;
 
+/* Function-pointer types for the native wasi-nn API that each backend implements */
+
+typedef wasi_nn_error (*LOAD)(void *, graph_builder_array *, graph_encoding,
+                              execution_target, graph *);
+typedef wasi_nn_error (*INIT_EXECUTION_CONTEXT)(void *, graph,
+                                                graph_execution_context *);
+typedef wasi_nn_error (*SET_INPUT)(void *, graph_execution_context, uint32_t,
+                                   tensor *);
+typedef wasi_nn_error (*COMPUTE)(void *, graph_execution_context);
+typedef wasi_nn_error (*GET_OUTPUT)(void *, graph_execution_context, uint32_t,
+                                    tensor_data, uint32_t *);
+/* wasi-nn general APIs */
+typedef void (*BACKEND_INITIALIZE)(void **);
+typedef void (*BACKEND_DEINITIALIZE)(void *);
+
+typedef struct {
+    LOAD load;
+    INIT_EXECUTION_CONTEXT init_execution_context;
+    SET_INPUT set_input;
+    COMPUTE compute;
+    GET_OUTPUT get_output;
+    BACKEND_INITIALIZE init;
+    BACKEND_DEINITIALIZE deinit;
+} api_function;
+
+bool
+wasi_nn_register_backend(graph_encoding backend_code, api_function apis);
+
+#ifdef __cplusplus
+}
+#endif
 #endif
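Any backend can populate this table the same way the TensorFlow Lite backend does later in this diff. A sketch of a hypothetical ONNX backend follows; all `onnx_*` names are placeholders for illustration, and only `api_function`, `wasi_nn_register_backend`, and the `onnx` encoding come from the header (the `runtime_error` member is assumed from the wasi-nn error enum):

```c
/* Hypothetical backend registration; everything prefixed onnx_ is a stub. */
#include "wasi_nn_types.h"

static wasi_nn_error
onnx_load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
          execution_target target, graph *g)
{
    /* a real implementation would parse the model bytes here */
    return runtime_error;
}

/* ... init_execution_context/set_input/compute/get_output/init/deinit
 *     stubs elided; they follow the typedefs above ... */

__attribute__((constructor(200))) static void
onnx_register_backend(void)
{
    api_function apis = { .load = onnx_load /* , .compute = ..., etc. */ };
    wasi_nn_register_backend(onnx, apis);
}
```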

+ 1 - 1
core/iwasm/libraries/wasi-nn/src/utils/logger.h

@@ -21,7 +21,7 @@
     3 -> err
     4 -> NO LOGS
 */
-#define NN_LOG_LEVEL 0
+#define NN_LOG_LEVEL 2
 #endif
 
 // Definition of the levels

+ 81 - 51
core/iwasm/libraries/wasi-nn/src/wasi_nn.c

@@ -13,7 +13,6 @@
 
 #include "wasi_nn_private.h"
 #include "wasi_nn_app_native.h"
-#include "wasi_nn_tensorflowlite.hpp"
 #include "logger.h"
 
 #include "bh_platform.h"
@@ -21,45 +20,14 @@
 
 #define HASHMAP_INITIAL_SIZE 20
 
-/* Definition of 'wasi_nn.h' structs in WASM app format (using offset) */
-
-typedef wasi_nn_error (*LOAD)(void *, graph_builder_array *, graph_encoding,
-                              execution_target, graph *);
-typedef wasi_nn_error (*INIT_EXECUTION_CONTEXT)(void *, graph,
-                                                graph_execution_context *);
-typedef wasi_nn_error (*SET_INPUT)(void *, graph_execution_context, uint32_t,
-                                   tensor *);
-typedef wasi_nn_error (*COMPUTE)(void *, graph_execution_context);
-typedef wasi_nn_error (*GET_OUTPUT)(void *, graph_execution_context, uint32_t,
-                                    tensor_data, uint32_t *);
-
-typedef struct {
-    LOAD load;
-    INIT_EXECUTION_CONTEXT init_execution_context;
-    SET_INPUT set_input;
-    COMPUTE compute;
-    GET_OUTPUT get_output;
-} api_function;
-
 /* Global variables */
-
-static api_function lookup[] = {
-    { NULL, NULL, NULL, NULL, NULL },
-    { NULL, NULL, NULL, NULL, NULL },
-    { NULL, NULL, NULL, NULL, NULL },
-    { NULL, NULL, NULL, NULL, NULL },
-    { tensorflowlite_load, tensorflowlite_init_execution_context,
-      tensorflowlite_set_input, tensorflowlite_compute,
-      tensorflowlite_get_output }
-};
+static api_function lookup[backend_amount] = { 0 };
 
 static HashMap *hashmap;
 
 static void
 wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx);
 
-/* Get wasi-nn context from module instance */
-
 static uint32
 hash_func(const void *key)
 {
@@ -105,7 +73,16 @@ wasi_nn_initialize_context()
         return NULL;
     }
     wasi_nn_ctx->is_model_loaded = false;
-    tensorflowlite_initialize(&wasi_nn_ctx->tflite_ctx);
+    /* only one backend can be registered */
+    {
+        unsigned i;
+        for (i = 0; i < sizeof(lookup) / sizeof(lookup[0]); i++) {
+            if (lookup[i].init) {
+                lookup[i].init(&wasi_nn_ctx->backend_ctx);
+                break;
+            }
+        }
+    }
     return wasi_nn_ctx;
 }
 
@@ -123,6 +100,7 @@ wasi_nn_initialize()
     return true;
 }
 
+/* Get wasi-nn context from module instance */
 static WASINNContext *
 wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)
 {
@@ -155,16 +133,30 @@ wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx)
     NN_DBG_PRINTF("Freeing wasi-nn");
     NN_DBG_PRINTF("-> is_model_loaded: %d", wasi_nn_ctx->is_model_loaded);
     NN_DBG_PRINTF("-> current_encoding: %d", wasi_nn_ctx->current_encoding);
-    tensorflowlite_destroy(wasi_nn_ctx->tflite_ctx);
+    /* only one backend can be registered */
+    {
+        unsigned i;
+        for (i = 0; i < sizeof(lookup) / sizeof(lookup[0]); i++) {
+            if (lookup[i].deinit) {
+                lookup[i].deinit(wasi_nn_ctx->backend_ctx);
+                break;
+            }
+        }
+    }
     wasm_runtime_free(wasi_nn_ctx);
 }
 
+static void
+wasi_nn_ctx_destroy_helper(void *instance, void *wasi_nn_ctx, void *user_data)
+{
+    wasi_nn_ctx_destroy((WASINNContext *)wasi_nn_ctx);
+}
+
 void
-wasi_nn_destroy(wasm_module_inst_t instance)
+wasi_nn_destroy()
 {
-    WASINNContext *wasi_nn_ctx = wasm_runtime_get_wasi_nn_ctx(instance);
-    bh_hash_map_remove(hashmap, (void *)instance, NULL, NULL);
-    wasi_nn_ctx_destroy(wasi_nn_ctx);
+    bh_hash_map_traverse(hashmap, wasi_nn_ctx_destroy_helper, NULL);
+    bh_hash_map_destroy(hashmap);
 }
 
 /* Utils */
@@ -233,7 +225,7 @@ wasi_nn_load(wasm_exec_env_t exec_env, graph_builder_array_wasm *builder,
     }
 
     WASINNContext *wasi_nn_ctx = wasm_runtime_get_wasi_nn_ctx(instance);
-    res = lookup[encoding].load(wasi_nn_ctx->tflite_ctx, &builder_native,
+    res = lookup[encoding].load(wasi_nn_ctx->backend_ctx, &builder_native,
                                 encoding, target, g);
 
     NN_DBG_PRINTF("wasi_nn_load finished with status %d [graph=%d]", res, *g);
@@ -270,7 +262,7 @@ wasi_nn_init_execution_context(wasm_exec_env_t exec_env, graph g,
     }
 
     res = lookup[wasi_nn_ctx->current_encoding].init_execution_context(
-        wasi_nn_ctx->tflite_ctx, g, ctx);
+        wasi_nn_ctx->backend_ctx, g, ctx);
 
     NN_DBG_PRINTF(
         "wasi_nn_init_execution_context finished with status %d [ctx=%d]", res,
@@ -300,7 +292,7 @@ wasi_nn_set_input(wasm_exec_env_t exec_env, graph_execution_context ctx,
         return res;
 
     res = lookup[wasi_nn_ctx->current_encoding].set_input(
-        wasi_nn_ctx->tflite_ctx, ctx, index, &input_tensor_native);
+        wasi_nn_ctx->backend_ctx, ctx, index, &input_tensor_native);
 
     // XXX: Free intermediate structure pointers
     if (input_tensor_native.dimensions)
@@ -323,8 +315,8 @@ wasi_nn_compute(wasm_exec_env_t exec_env, graph_execution_context ctx)
     if (success != (res = is_model_initialized(wasi_nn_ctx)))
         return res;
 
-    res = lookup[wasi_nn_ctx->current_encoding].compute(wasi_nn_ctx->tflite_ctx,
-                                                        ctx);
+    res = lookup[wasi_nn_ctx->current_encoding].compute(
+        wasi_nn_ctx->backend_ctx, ctx);
     NN_DBG_PRINTF("wasi_nn_compute finished with status %d", res);
     return res;
 }
@@ -360,11 +352,13 @@ wasi_nn_get_output(wasm_exec_env_t exec_env, graph_execution_context ctx,
 
 #if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
     res = lookup[wasi_nn_ctx->current_encoding].get_output(
-        wasi_nn_ctx->tflite_ctx, ctx, index, output_tensor, &output_tensor_len);
+        wasi_nn_ctx->backend_ctx, ctx, index, output_tensor,
+        &output_tensor_len);
     *output_tensor_size = output_tensor_len;
 #else  /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
     res = lookup[wasi_nn_ctx->current_encoding].get_output(
-        wasi_nn_ctx->tflite_ctx, ctx, index, output_tensor, output_tensor_size);
+        wasi_nn_ctx->backend_ctx, ctx, index, output_tensor,
+        output_tensor_size);
 #endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
     NN_DBG_PRINTF("wasi_nn_get_output finished with status %d [data_size=%d]",
                   res, *output_tensor_size);
@@ -397,17 +391,53 @@ static NativeSymbol native_symbols_wasi_nn[] = {
 uint32_t
 get_wasi_nn_export_apis(NativeSymbol **p_native_symbols)
 {
-    if (!wasi_nn_initialize())
-        return 0;
     *p_native_symbols = native_symbols_wasi_nn;
     return sizeof(native_symbols_wasi_nn) / sizeof(NativeSymbol);
 }
 
-#if defined(WASI_NN_SHARED)
-uint32_t
+__attribute__((used)) uint32_t
 get_native_lib(char **p_module_name, NativeSymbol **p_native_symbols)
 {
+    NN_DBG_PRINTF("--|> get_native_lib");
+
+#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
+    *p_module_name = "wasi_ephemeral_nn";
+#else  /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
     *p_module_name = "wasi_nn";
+#endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
+
     return get_wasi_nn_export_apis(p_native_symbols);
 }
-#endif
+
+__attribute__((used)) int
+init_native_lib()
+{
+    NN_DBG_PRINTF("--|> init_native_lib");
+
+    if (!wasi_nn_initialize())
+        return 1;
+
+    return 0;
+}
+
+__attribute__((used)) void
+deinit_native_lib()
+{
+    NN_DBG_PRINTF("--|> deinit_native_lib");
+
+    wasi_nn_destroy();
+}
+
+__attribute__((used)) bool
+wasi_nn_register_backend(graph_encoding backend_code, api_function apis)
+{
+    NN_DBG_PRINTF("--|> wasi_nn_register_backend");
+
+    if (backend_code >= sizeof(lookup) / sizeof(lookup[0])) {
+        NN_ERR_PRINTF("Invalid backend code");
+        return false;
+    }
+
+    lookup[backend_code] = apis;
+    return true;
+}
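Note the `/* only one backend can be registered */` loops above: context init and deinit stop at the first entry in `lookup[]` that has an `init`/`deinit` hook, so `backend_ctx` always belongs to the first registered backend. Loading two backend libraries at once would fill two `lookup[]` slots but initialize only one context, and the per-call dispatch via `lookup[wasi_nn_ctx->current_encoding]` could then hand that context to the wrong backend. One `--native-lib` backend per process is the supported configuration.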

+ 1 - 9
core/iwasm/libraries/wasi-nn/src/wasi_nn_private.h

@@ -12,15 +12,7 @@
 typedef struct {
     bool is_model_loaded;
     graph_encoding current_encoding;
-    void *tflite_ctx;
+    void *backend_ctx;
 } WASINNContext;
 
-/**
- * @brief Destroy wasi-nn on app exists
- *
- */
-
-void
-wasi_nn_destroy(wasm_module_inst_t instance);
-
 #endif

+ 15 - 1
core/iwasm/libraries/wasi-nn/src/wasi_nn_tensorflowlite.cpp

@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  */
 
-#include "wasi_nn_types.h"
 #include "wasi_nn_tensorflowlite.hpp"
 #include "logger.h"
 
@@ -487,3 +486,18 @@ tensorflowlite_destroy(void *tflite_ctx)
     delete tfl_ctx;
     NN_DBG_PRINTF("Memory free'd.");
 }
+
+__attribute__((constructor(200))) void
+tflite_register_backend()
+{
+    api_function apis = {
+        .load = tensorflowlite_load,
+        .init_execution_context = tensorflowlite_init_execution_context,
+        .set_input = tensorflowlite_set_input,
+        .compute = tensorflowlite_compute,
+        .get_output = tensorflowlite_get_output,
+        .init = tensorflowlite_initialize,
+        .deinit = tensorflowlite_destroy,
+    };
+    wasi_nn_register_backend(tensorflowlite, apis);
+}

+ 17 - 20
core/iwasm/libraries/wasi-nn/test/Dockerfile.cpu

@@ -5,31 +5,28 @@ FROM ubuntu:20.04 AS base
 
 ENV DEBIAN_FRONTEND=noninteractive
 
-# hadolint ignore=DL3008
-RUN apt-get update && apt-get install -y \
-  cmake build-essential git --no-install-recommends
+# hadolint ignore=DL3008,DL3009
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+       ca-certificates cmake build-essential git wget
 
-WORKDIR /home/wamr
+WORKDIR /usr/local/share/ca-certificates/cacert.org
+RUN wget -qP /usr/local/share/ca-certificates/cacert.org http://www.cacert.org/certs/root.crt http://www.cacert.org/certs/class3.crt \
+  && update-ca-certificates
 
+WORKDIR /home/wamr
 COPY . .
+RUN git config --global http.sslCAinfo /etc/ssl/certs/ca-certificates.crt
 
-WORKDIR /home/wamr/product-mini/platforms/linux/build
-
-# hadolint ignore=DL3008
-RUN apt-get install -y wget ca-certificates --no-install-recommends \
-  && mkdir /usr/local/share/ca-certificates/cacert.org \
-  && wget -qP /usr/local/share/ca-certificates/cacert.org http://www.cacert.org/certs/root.crt http://www.cacert.org/certs/class3.crt \
-  && update-ca-certificates \
-  && git config --global http.sslCAinfo /etc/ssl/certs/ca-certificates.crt
-
-RUN cmake \
-  -DWAMR_BUILD_WASI_NN=1 \
-  ..
-
-RUN make -j "$(grep -c ^processor /proc/cpuinfo)"
+WORKDIR /home/wamr/product-mini/platforms/linux
+RUN rm -rf build \
+  && cmake -S . -B build -DWAMR_BUILD_WASI_NN=1 \
+  && cmake --build build -j "$(grep -c ^processor /proc/cpuinfo)"
 
 FROM ubuntu:22.04
 
-COPY --from=base /home/wamr/product-mini/platforms/linux/build/iwasm /iwasm
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/iwasm /usr/bin/iwasm
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/libiwasm.so /lib/libiwasm.so
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/libwasi-nn-*.so /lib/
 
-ENTRYPOINT [ "/iwasm" ]
+ENTRYPOINT [ "iwasm" ]

+ 18 - 20
core/iwasm/libraries/wasi-nn/test/Dockerfile.nvidia-gpu

@@ -5,29 +5,25 @@ FROM ubuntu:20.04 AS base
 
 ENV DEBIAN_FRONTEND=noninteractive
 
-# hadolint ignore=DL3008
-RUN apt-get update && apt-get install -y \
-    cmake build-essential git --no-install-recommends
+# hadolint ignore=DL3008,DL3009
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+       ca-certificates cmake build-essential git wget
 
-WORKDIR /home/wamr
+WORKDIR /usr/local/share/ca-certificates/cacert.org
+RUN wget -qP /usr/local/share/ca-certificates/cacert.org http://www.cacert.org/certs/root.crt http://www.cacert.org/certs/class3.crt \
+  && update-ca-certificates
 
+WORKDIR /home/wamr
 COPY . .
+RUN git config --global http.sslCAinfo /etc/ssl/certs/ca-certificates.crt
 
 WORKDIR /home/wamr/product-mini/platforms/linux/build
-
-# hadolint ignore=DL3008
-RUN apt-get install -y wget ca-certificates --no-install-recommends \
-  && mkdir /usr/local/share/ca-certificates/cacert.org \
-  && wget -qP /usr/local/share/ca-certificates/cacert.org http://www.cacert.org/certs/root.crt http://www.cacert.org/certs/class3.crt \
-  && update-ca-certificates \
-  && git config --global http.sslCAinfo /etc/ssl/certs/ca-certificates.crt
-
-RUN cmake \
-    -DWAMR_BUILD_WASI_NN=1 \
-    -DWAMR_BUILD_WASI_NN_ENABLE_GPU=1 \
-    ..
-
-RUN make -j "$(grep -c ^processor /proc/cpuinfo)"
+RUN rm -rf build \
+  && cmake -S . -B build \
+       -DWAMR_BUILD_WASI_NN=1 \
+       -DWAMR_BUILD_WASI_NN_ENABLE_GPU=1 \
+  && cmake --build build -j "$(grep -c ^processor /proc/cpuinfo)"
 
 FROM nvidia/cuda:11.3.0-runtime-ubuntu20.04
 
@@ -44,6 +40,8 @@ RUN mkdir -p /etc/OpenCL/vendors && \
 ENV NVIDIA_VISIBLE_DEVICES=all
 ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
 
-COPY --from=base /home/wamr/product-mini/platforms/linux/build/iwasm /iwasm
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/iwasm /usr/bin/iwasm
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/libiwasm.so /lib/libiwasm.so
+COPY --from=base /home/wamr/product-mini/platforms/linux/build/libwasi-nn-*.so /lib/
 
-ENTRYPOINT [ "/iwasm" ]
+ENTRYPOINT [ "iwasm" ]

+ 20 - 22
core/iwasm/libraries/wasi-nn/test/Dockerfile.tpu

@@ -5,33 +5,31 @@ FROM ubuntu:20.04 AS base
 
 ENV DEBIAN_FRONTEND=noninteractive
 
-# hadolint ignore=DL3008
-RUN apt-get update && apt-get install -y \
-    cmake build-essential git curl gnupg --no-install-recommends && \
-    rm -rf /var/lib/apt/lists/*
+# hadolint ignore=DL3008,DL3009
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+       ca-certificates cmake build-essential git wget
 
-# hadolint ignore=DL3008,DL4006
-RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list && \
-    curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
-    apt-get update && apt-get install -y libedgetpu1-std --no-install-recommends && \
-    rm -rf /var/lib/apt/lists/*
+WORKDIR /usr/local/share/ca-certificates/cacert.org
+RUN wget -qP /usr/local/share/ca-certificates/cacert.org http://www.cacert.org/certs/root.crt http://www.cacert.org/certs/class3.crt \
+  && update-ca-certificates
 
 WORKDIR /home/wamr
-
 COPY . .
+RUN git config --global http.sslCAinfo /etc/ssl/certs/ca-certificates.crt
 
-WORKDIR /home/wamr/product-mini/platforms/linux/build
-
-RUN cmake \
-  -DWAMR_BUILD_WASI_NN=1 \
-  -DWAMR_BUILD_WASI_NN_ENABLE_EXTERNAL_DELEGATE=1 \
-  -DWAMR_BUILD_WASI_NN_EXTERNAL_DELEGATE_PATH="libedgetpu.so.1.0" \
-  -DWAMR_BUILD_WASI_NN_ENABLE_GPU=1 \
-  ..
+WORKDIR /home/wamr/product-mini/platforms/linux
+RUN rm -rf build \
+  && cmake -S . -B build \
+    -DWAMR_BUILD_WASI_NN=1 \
+    -DWAMR_BUILD_WASI_NN_ENABLE_EXTERNAL_DELEGATE=1 \
+    -DWAMR_BUILD_WASI_NN_EXTERNAL_DELEGATE_PATH="libedgetpu.so.1.0" \
+    -DWAMR_BUILD_WASI_NN_ENABLE_GPU=1 \
+  && cmake --build build -j "$(grep -c ^processor /proc/cpuinfo)"
 
-RUN make -j "$(grep -c ^processor /proc/cpuinfo)" && \
-    cp /home/wamr/product-mini/platforms/linux/build/iwasm /iwasm
+RUN cp /home/wamr/product-mini/platforms/linux/build/iwasm /usr/bin/iwasm \
+  && cp /home/wamr/product-mini/platforms/linux/build/libiwasm.so /lib/libiwasm.so \
+  && cp /home/wamr/product-mini/platforms/linux/build/libwasi-nn-*.so /lib/
 
 WORKDIR /assets
-
-ENTRYPOINT [ "/iwasm" ]
+ENTRYPOINT [ "iwasm" ]

+ 1 - 1
core/iwasm/libraries/wasi-nn/test/utils.h

@@ -8,7 +8,7 @@
 
 #include <stdint.h>
 
-#include "wasi_nn.h"
+#include "wasi_nn_types.h"
 
 #define MAX_MODEL_SIZE 85000000
 #define MAX_OUTPUT_TENSOR_SIZE 1000000

+ 22 - 3
product-mini/platforms/linux/CMakeLists.txt

@@ -123,6 +123,18 @@ if (WAMR_BUILD_DEBUG_INTERP EQUAL 1)
   set (WAMR_BUILD_SIMD 0)
 endif ()
 
+# if wasi-nn is enabled, both the wasi-nn backends and iwasm
+# need to use the same WAMR (dynamic) library
+if (WAMR_BUILD_WASI_NN EQUAL 1)
+  set (WAMR_BUILD_SHARED 1)
+endif ()
+
+if (NOT DEFINED WAMR_BUILD_SHARED)
+  set (WAMR_BUILD_SHARED 0)
+elseif (WAMR_BUILD_SHARED EQUAL 1)
+  message ("build WAMR as shared libraries")
+endif ()
+
 set (WAMR_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
 
 include (${WAMR_ROOT_DIR}/build-scripts/runtime_lib.cmake)
@@ -160,9 +172,16 @@ add_executable (iwasm main.c ${UNCOMMON_SHARED_SOURCE})
 
 set_target_properties (iwasm PROPERTIES POSITION_INDEPENDENT_CODE ON)
 
-install (TARGETS iwasm DESTINATION bin)
+target_link_libraries(iwasm
+  $<$<BOOL:${WAMR_BUILD_SHARED}>:libiwasm> $<$<NOT:$<BOOL:${WAMR_BUILD_SHARED}>>:vmlib>
+  ${LLVM_AVAILABLE_LIBS}
+  ${UV_A_LIBS}
+  -lm
+  -ldl
+  -lpthread
+)
 
-target_link_libraries (iwasm vmlib ${LLVM_AVAILABLE_LIBS} ${UV_A_LIBS} ${WASI_NN_LIBS} -lm -ldl -lpthread)
+install (TARGETS iwasm DESTINATION bin)
 
 add_library (libiwasm SHARED ${WAMR_RUNTIME_LIB_SOURCE})
 
@@ -170,4 +189,4 @@ install (TARGETS libiwasm DESTINATION lib)
 
 set_target_properties (libiwasm PROPERTIES OUTPUT_NAME iwasm)
 
-target_link_libraries (libiwasm ${LLVM_AVAILABLE_LIBS} ${UV_A_LIBS} ${WASI_NN_LIBS} -lm -ldl -lpthread)
+target_link_libraries (libiwasm ${LLVM_AVAILABLE_LIBS} ${UV_A_LIBS} -lm -ldl -lpthread)

+ 2 - 1
product-mini/platforms/posix/main.c

@@ -295,7 +295,8 @@ load_native_lib(const char *name)
     /* open the native library */
     if (!(lib->handle = dlopen(name, RTLD_NOW | RTLD_GLOBAL))
         && !(lib->handle = dlopen(name, RTLD_LAZY))) {
-        LOG_WARNING("warning: failed to load native library %s", name);
+        LOG_WARNING("warning: failed to load native library %s. %s", name,
+                    dlerror());
         goto fail;
     }