7 changes: 6 additions & 1 deletion build-scripts/config_common.cmake
@@ -511,7 +511,8 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
# Variant backends
if (NOT WAMR_BUILD_WASI_NN_TFLITE EQUAL 1 AND
NOT WAMR_BUILD_WASI_NN_OPENVINO EQUAL 1 AND
- NOT WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1)
+ NOT WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1 AND
+ NOT WAMR_BUILD_WASI_NN_ONNXRUNTIME EQUAL 1)
message (FATAL_ERROR " Need to select a backend for WASI-NN")
endif ()

@@ -527,6 +528,10 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
message (" WASI-NN: backend llamacpp enabled")
add_definitions (-DWASM_ENABLE_WASI_NN_LLAMACPP)
endif ()
if (WAMR_BUILD_WASI_NN_ONNXRUNTIME EQUAL 1)
message (" WASI-NN: backend onnxruntime enabled")
add_definitions (-DWASM_ENABLE_WASI_NN_ONNXRUNTIME)
endif ()
# Variant devices
if (WAMR_BUILD_WASI_NN_ENABLE_GPU EQUAL 1)
message (" WASI-NN: GPU enabled")
3 changes: 2 additions & 1 deletion core/iwasm/libraries/wasi-nn/README.md
@@ -26,6 +26,7 @@ $ cmake -DWAMR_BUILD_WASI_NN=1 <other options> ...
- `WAMR_BUILD_WASI_NN_TFLITE`. This option designates TensorFlow Lite as the backend.
- `WAMR_BUILD_WASI_NN_OPENVINO`. This option designates OpenVINO as the backend.
- `WAMR_BUILD_WASI_NN_LLAMACPP`. This option designates Llama.cpp as the backend.
- `WAMR_BUILD_WASI_NN_ONNXRUNTIME`. This option designates ONNX Runtime as the backend.
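
For example, the ONNX Runtime backend could be selected with a configure line along these lines (illustrative only; combine with whatever other options your build already passes):

$ cmake -DWAMR_BUILD_WASI_NN=1 -DWAMR_BUILD_WASI_NN_ONNXRUNTIME=1 <other options> ...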

### Wasm

@@ -151,7 +152,7 @@ docker run \

Supported:

- - Graph encoding: `tensorflowlite`, `openvino` and `ggml`
+ - Graph encoding: `tensorflowlite`, `openvino`, `ggml` and `onnx`
- Execution target: `cpu` for all. `gpu` and `tpu` for `tensorflowlite`.
- Tensor type: `fp32`.
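
From the guest side, the new backend is reached by passing the `onnx` graph encoding to `load`; the rest is the usual wasi-nn flow of `init_execution_context` / `set_input` / `compute` / `get_output`. The sketch below is illustrative only: the struct layouts and enum spellings are assumptions based on the wasi-nn guest headers (wasi_nn.h / wasi_nn_types.h) and should be checked against the headers in the tree. The backend library itself is resolved at run time by the host-side loader in wasi_nn.c, shown further down in this diff.

/*
 * Illustrative sketch: field names, enum values and signatures are assumed
 * from wasi_nn.h / wasi_nn_types.h and may differ slightly in the tree.
 */
#include <stdint.h>
#include "wasi_nn.h"

static int
run_onnx_model(uint8_t *model_buf, uint32_t model_size,
               float *input, uint32_t input_len,
               float *output, uint32_t output_len)
{
    graph g;
    graph_execution_context ctx;

    /* A single builder entry carrying the serialized .onnx model bytes. */
    graph_builder builder = { .buf = model_buf, .size = model_size };
    graph_builder_array builders = { .buf = &builder, .size = 1 };

    /* The `onnx` encoding is what routes the request to the ONNX Runtime backend. */
    if (load(&builders, onnx, cpu, &g) != success)
        return -1;
    if (init_execution_context(g, &ctx) != success)
        return -1;

    /* Assume a single 1 x input_len fp32 input tensor. */
    uint32_t dims[2] = { 1, input_len };
    tensor_dimensions dimensions = { .buf = dims, .size = 2 };
    tensor in = { .dimensions = &dimensions, .type = fp32, .data = (uint8_t *)input };
    if (set_input(ctx, 0, &in) != success)
        return -1;

    if (compute(ctx) != success)
        return -1;

    uint32_t out_bytes = output_len * sizeof(float);
    if (get_output(ctx, 0, (uint8_t *)output, &out_bytes) != success)
        return -1;

    return 0;
}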

85 changes: 85 additions & 0 deletions core/iwasm/libraries/wasi-nn/cmake/Findonnxruntime.cmake
@@ -0,0 +1,85 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Find ONNX Runtime library
#
# This module defines the following variables:
#
# ::
#
# onnxruntime_FOUND - True if onnxruntime is found
# onnxruntime_INCLUDE_DIRS - Include directories for onnxruntime
# onnxruntime_LIBRARIES - List of libraries for onnxruntime
# onnxruntime_VERSION - Version of onnxruntime
#
# ::
#
# Example usage:
#
# find_package(onnxruntime)
# if(onnxruntime_FOUND)
# target_link_libraries(app onnxruntime)
# endif()

# First try to find ONNX Runtime using the CMake config file
find_package(onnxruntime CONFIG QUIET)
if(onnxruntime_FOUND)
message(STATUS "Found ONNX Runtime via CMake config: ${onnxruntime_DIR}")
return()
endif()

# If not found via CMake config, try to find manually
find_path(onnxruntime_INCLUDE_DIR
NAMES onnxruntime_c_api.h
PATHS
/usr/include
/usr/local/include
/opt/onnxruntime/include
$ENV{ONNXRUNTIME_ROOT}/include
${CMAKE_CURRENT_LIST_DIR}/../../../../..
/home/ubuntu/onnxruntime/onnxruntime-linux-x64-1.16.3/include
PATH_SUFFIXES onnxruntime
)

find_library(onnxruntime_LIBRARY
NAMES onnxruntime
PATHS
/usr/lib
/usr/local/lib
/opt/onnxruntime/lib
$ENV{ONNXRUNTIME_ROOT}/lib
${CMAKE_CURRENT_LIST_DIR}/../../../../..
/home/ubuntu/onnxruntime/onnxruntime-linux-x64-1.16.3/lib
)

# Try to determine version from header file
if(onnxruntime_INCLUDE_DIR)
file(STRINGS "${onnxruntime_INCLUDE_DIR}/onnxruntime_c_api.h" onnxruntime_version_str
REGEX "^#define[\t ]+ORT_API_VERSION[\t ]+[0-9]+")

if(onnxruntime_version_str)
string(REGEX REPLACE "^#define[\t ]+ORT_API_VERSION[\t ]+([0-9]+)" "\\1"
onnxruntime_VERSION "${onnxruntime_version_str}")
endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(onnxruntime
REQUIRED_VARS onnxruntime_LIBRARY onnxruntime_INCLUDE_DIR
VERSION_VAR onnxruntime_VERSION
)

if(onnxruntime_FOUND)
set(onnxruntime_LIBRARIES ${onnxruntime_LIBRARY})
set(onnxruntime_INCLUDE_DIRS ${onnxruntime_INCLUDE_DIR})

if(NOT TARGET onnxruntime)
add_library(onnxruntime UNKNOWN IMPORTED)
set_target_properties(onnxruntime PROPERTIES
IMPORTED_LOCATION "${onnxruntime_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_INCLUDE_DIRS}"
)
endif()
endif()

mark_as_advanced(onnxruntime_INCLUDE_DIR onnxruntime_LIBRARY)
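
If ONNX Runtime is unpacked in a non-standard location, the module above can be pointed at it through the ONNXRUNTIME_ROOT environment variable it already searches, for example (path illustrative):

$ ONNXRUNTIME_ROOT=/path/to/onnxruntime-linux-x64-1.16.3 cmake -DWAMR_BUILD_WASI_NN=1 -DWAMR_BUILD_WASI_NN_ONNXRUNTIME=1 <other options> ...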
27 changes: 27 additions & 0 deletions core/iwasm/libraries/wasi-nn/cmake/wasi_nn.cmake
@@ -109,3 +109,30 @@ if(WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1)

install(TARGETS wasi_nn_llamacpp DESTINATION lib)
endif()

# - onnxruntime
if(WAMR_BUILD_WASI_NN_ONNXRUNTIME EQUAL 1)
find_package(onnxruntime REQUIRED)
enable_language(CXX)

add_library(
wasi_nn_onnxruntime
SHARED
${WASI_NN_ROOT}/src/wasi_nn_onnxruntime.cpp
)

target_include_directories(
wasi_nn_onnxruntime
PUBLIC
${onnxruntime_INCLUDE_DIRS}
)

target_link_libraries(
wasi_nn_onnxruntime
PUBLIC
vmlib
onnxruntime
)

install(TARGETS wasi_nn_onnxruntime DESTINATION lib)
endif()
14 changes: 14 additions & 0 deletions core/iwasm/libraries/wasi-nn/src/wasi_nn.c
@@ -24,6 +24,7 @@
#define TFLITE_BACKEND_LIB "libwasi_nn_tflite.so"
#define OPENVINO_BACKEND_LIB "libwasi_nn_openvino.so"
#define LLAMACPP_BACKEND_LIB "libwasi_nn_llamacpp.so"
#define ONNXRUNTIME_BACKEND_LIB "libwasi_nn_onnxruntime.so"

/* Global variables */
struct backends_api_functions {
@@ -212,6 +213,17 @@ choose_a_backend()
return openvino;
}

#ifndef NDEBUG
NN_WARN_PRINTF("%s", dlerror());
#endif

handle = dlopen(ONNXRUNTIME_BACKEND_LIB, RTLD_LAZY);
if (handle) {
NN_INFO_PRINTF("Using onnxruntime backend");
dlclose(handle);
return onnx;
}

#ifndef NDEBUG
NN_WARN_PRINTF("%s", dlerror());
#endif
@@ -335,6 +347,8 @@ graph_encoding_to_backend_lib_name(graph_encoding encoding)
return TFLITE_BACKEND_LIB;
case ggml:
return LLAMACPP_BACKEND_LIB;
case onnx:
return ONNXRUNTIME_BACKEND_LIB;
default:
return NULL;
}