Skip to content

Commit a6ca33c

Browse files
Craigacp authored and RyanUnderhill committed
Java API for onnxruntime (#2215)
1 parent 2058d63 commit a6ca33c

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

42 files changed

+41792
-3
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,3 +39,6 @@ onnxprofile_profile_test_*.json
3939
/csharp/packages
4040
/csharp/src/Microsoft.ML.OnnxRuntime/Microsoft.ML.OnnxRuntime.targets
4141
/csharp/src/Microsoft.ML.OnnxRuntime/Microsoft.ML.OnnxRuntime.props
42+
# Java specific ignores
43+
java/src/main/native/ai_onnxruntime_*.h
44+
java/.gradle

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@ Additional dockerfiles can be found [here](./dockerfiles).
8888
* [C](docs/C_API.md)
8989
* [C#](docs/CSharp_API.md)
9090
* [C++](./include/onnxruntime/core/session/onnxruntime_cxx_api.h)
91+
* [Java](docs/Java_API.md)
9192
* [Ruby](https://github.com/ankane/onnxruntime) (external project)
9293

9394
### Official Builds
@@ -107,6 +108,7 @@ system.
107108
* Version: **CUDA 10.0** and **cuDNN 7.6**
108109
* Older ONNX Runtime releases: used **CUDA 9.1** and **cuDNN 7.1** - please refer to [prior release notes](https://github.com/microsoft/onnxruntime/releases) for more details.
109110
* Python binaries are compatible with **Python 3.5-3.7**. See [Python Dev Notes](./docs/Python_Dev_Notes.md). If using `pip` to download the Python binaries, run `pip install --upgrade pip` prior to downloading.
111+
* The Java API is compatible with **Java 8-13**.
110112
* Certain operators makes use of system locales. Installation of the **English language package** and configuring `en_US.UTF-8 locale` is required.
111113
* For Ubuntu install [language-pack-en package](https://packages.ubuntu.com/search?keywords=language-pack-en)
112114
* Run the following commands:

cmake/CMakeLists.txt

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -726,6 +726,11 @@ if (onnxruntime_BUILD_SERVER)
726726
include(onnxruntime_server.cmake)
727727
endif()
728728

729+
if (onnxruntime_BUILD_JAVA)
730+
message(STATUS "Java Build is enabled")
731+
include(onnxruntime_java.cmake)
732+
endif()
733+
729734
# some of the tests rely on the shared libs to be
730735
# built; hence the ordering
731736
if (onnxruntime_BUILD_UNIT_TESTS)
@@ -756,3 +761,4 @@ if (onnxruntime_BUILD_CSHARP)
756761
# set_property(GLOBAL PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "netstandard2.0")
757762
include(onnxruntime_csharp.cmake)
758763
endif()
764+

cmake/onnxruntime_java.cmake

Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
# Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
2+
# Licensed under the MIT License.
3+
4+
#set(CMAKE_VERBOSE_MAKEFILE on)
5+
6+
# Setup Java compilation
7+
include(FindJava)
8+
find_package(Java REQUIRED)
9+
find_package(JNI REQUIRED)
10+
include(UseJava)
11+
include_directories(${JNI_INCLUDE_DIRS})
12+
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c11")
13+
14+
set(JAVA_ROOT ${REPO_ROOT}/java)
15+
set(CMAKE_JAVA_COMPILE_FLAGS "-source" "1.8" "-target" "1.8" "-encoding" "UTF-8")
16+
if (onnxruntime_RUN_ONNX_TESTS)
17+
set(JAVA_DEPENDS onnxruntime ${test_data_target})
18+
else()
19+
set(JAVA_DEPENDS onnxruntime)
20+
endif()
21+
22+
# Specify the Java source files
23+
set(onnxruntime4j_src
24+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/MapInfo.java
25+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/NodeInfo.java
26+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxRuntime.java
27+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxJavaType.java
28+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxMap.java
29+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxSequence.java
30+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxTensor.java
31+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxValue.java
32+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtAllocator.java
33+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtEnvironment.java
34+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtException.java
35+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtSession.java
36+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtUtil.java
37+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/package-info.java
38+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/SequenceInfo.java
39+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/TensorInfo.java
40+
${REPO_ROOT}/java/src/main/java/ai/onnxruntime/ValueInfo.java
41+
)
42+
43+
# Build the jar and generate the native headers
44+
add_jar(onnxruntime4j SOURCES ${onnxruntime4j_src} VERSION ${ORT_VERSION} GENERATE_NATIVE_HEADERS onnxruntime4j_generated DESTINATION ${REPO_ROOT}/java/src/main/native/)
45+
46+
# Specify the native sources (without the generated headers)
47+
file(GLOB onnxruntime4j_native_src
48+
"${REPO_ROOT}/java/src/main/native/*.c"
49+
"${REPO_ROOT}/java/src/main/native/OrtJniUtil.h"
50+
"${REPO_ROOT}/include/onnxruntime/core/session/*.h"
51+
)
52+
53+
# Build the JNI library
54+
add_library(onnxruntime4j_jni SHARED ${onnxruntime4j_native_src} ${onnxruntime4j_generated})
55+
onnxruntime_add_include_to_target(onnxruntime4j_jni onnxruntime_session)
56+
target_include_directories(onnxruntime4j_jni PRIVATE ${REPO_ROOT}/include ${REPO_ROOT}/java/src/main/native)
57+
target_link_libraries(onnxruntime4j_jni PUBLIC ${JNI_LIBRARIES} onnxruntime onnxruntime4j_generated)
58+
59+
# Now the jar, jni binary and shared lib binary have been built, now to build the jar with the binaries added.
60+
61+
# This blob creates the new jar name
62+
get_property(onnxruntime_jar_name TARGET onnxruntime4j PROPERTY JAR_FILE)
63+
get_filename_component(onnxruntime_jar_abs ${onnxruntime_jar_name} ABSOLUTE)
64+
get_filename_component(jar_path ${onnxruntime_jar_abs} DIRECTORY)
65+
set(onnxruntime_jar_binaries_name "${jar_path}/onnxruntime4j-${ORT_VERSION}-with-binaries.jar")
66+
set(onnxruntime_jar_binaries_platform "$<SHELL_PATH:${onnxruntime_jar_binaries_name}>")
67+
68+
# Copy the current jar
69+
add_custom_command(TARGET onnxruntime4j_jni PRE_BUILD
70+
COMMAND ${CMAKE_COMMAND} -E copy
71+
${onnxruntime_jar_name}
72+
${onnxruntime_jar_binaries_platform})
73+
74+
# Make a temp directory to store the binaries
75+
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD
76+
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib")
77+
78+
# Copy the binaries
79+
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_FILE:onnxruntime4j_jni>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)
80+
81+
if (WIN32)
82+
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_FILE:onnxruntime>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)
83+
# Update the with-binaries jar so it includes the binaries
84+
add_custom_command(
85+
TARGET onnxruntime4j_jni POST_BUILD
86+
COMMAND ${Java_JAR_EXECUTABLE} -uf ${onnxruntime_jar_binaries_platform} -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime4j_jni> -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime>
87+
DEPENDS onnxruntime4j
88+
COMMENT "Rebuilding Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
89+
VERBATIM
90+
)
91+
else ()
92+
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_LINKER_FILE:onnxruntime>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)
93+
# Update the with-binaries jar so it includes the binaries
94+
add_custom_command(
95+
TARGET onnxruntime4j_jni POST_BUILD
96+
COMMAND ${Java_JAR_EXECUTABLE} -uf ${onnxruntime_jar_binaries_platform} -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime4j_jni> -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_LINKER_FILE_NAME:onnxruntime>
97+
DEPENDS onnxruntime4j
98+
COMMENT "Rebuilding Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
99+
VERBATIM
100+
)
101+
endif()
102+
103+
create_javadoc(onnxruntime4j_javadoc
104+
FILES ${onnxruntime4j_src}
105+
DOCTITLE "Onnx Runtime Java API"
106+
WINDOWTITLE "OnnxRuntime-Java-API"
107+
AUTHOR FALSE
108+
USE TRUE
109+
VERSION FALSE
110+
)

cmake/onnxruntime_unittests.cmake

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -800,7 +800,6 @@ list(APPEND onnxruntime_mlas_test_libs Threads::Threads)
800800
target_link_libraries(onnxruntime_mlas_test PRIVATE ${onnxruntime_mlas_test_libs})
801801
set_target_properties(onnxruntime_mlas_test PROPERTIES FOLDER "ONNXRuntimeTest")
802802

803-
804803
add_library(custom_op_library SHARED ${REPO_ROOT}/onnxruntime/test/testdata/custom_op_library/custom_op_library.cc)
805804
target_include_directories(custom_op_library PRIVATE ${REPO_ROOT}/include)
806805
if(UNIX)
@@ -814,3 +813,38 @@ else()
814813
# need to ignore the linker warning 4199, due to some global linker flags failing here
815814
endif()
816815
set_property(TARGET custom_op_library APPEND_STRING PROPERTY LINK_FLAGS ${ONNXRUNTIME_CUSTOM_OP_LIB_LINK_FLAG})
816+
817+
if (onnxruntime_BUILD_JAVA)
818+
message(STATUS "Running Java tests")
819+
# Build and run tests
820+
set(onnxruntime4j_test_src
821+
${REPO_ROOT}/java/src/test/java/ai/onnxruntime/InferenceTest.java
822+
${REPO_ROOT}/java/src/test/java/ai/onnxruntime/TestHelpers.java
823+
${REPO_ROOT}/java/src/test/java/ai/onnxruntime/OnnxMl.java
824+
${REPO_ROOT}/java/src/test/java/ai/onnxruntime/UtilTest.java
825+
)
826+
827+
# Create test directories
828+
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/java-tests/")
829+
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/java-tests/results")
830+
831+
# Download test dependencies
832+
if (NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/java-tests/junit-platform-console-standalone-1.5.2.jar)
833+
message("Downloading JUnit 5")
834+
file(DOWNLOAD https://repo1.maven.org/maven2/org/junit/platform/junit-platform-console-standalone/1.5.2/junit-platform-console-standalone-1.5.2.jar ${CMAKE_CURRENT_BINARY_DIR}/java-tests/junit-platform-console-standalone-1.5.2.jar EXPECTED_HASH SHA1=8d937d2b461018a876836362b256629f4da5feb1)
835+
endif()
836+
837+
if (NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/java-tests/protobuf-java-3.10.0.jar)
838+
message("Downloading protobuf-java 3.10.0")
839+
file(DOWNLOAD https://repo1.maven.org/maven2/com/google/protobuf/protobuf-java/3.10.0/protobuf-java-3.10.0.jar ${CMAKE_CURRENT_BINARY_DIR}/java-tests/protobuf-java-3.10.0.jar EXPECTED_HASH SHA1=410b61dd0088aab4caa05739558d43df248958c9)
840+
endif()
841+
842+
# Build the test jar
843+
add_jar(onnxruntime4j_test SOURCES ${onnxruntime4j_test_src} VERSION ${ORT_VERSION} INCLUDE_JARS ${onnxruntime_jar_name} ${CMAKE_CURRENT_BINARY_DIR}/java-tests/junit-platform-console-standalone-1.5.2.jar ${CMAKE_CURRENT_BINARY_DIR}/java-tests/protobuf-java-3.10.0.jar)
844+
845+
add_dependencies(onnxruntime4j_test onnxruntime4j_jni onnxruntime4j)
846+
get_property(onnxruntime_test_jar_name TARGET onnxruntime4j_test PROPERTY JAR_FILE)
847+
848+
# Run the tests with JUnit's console launcher
849+
add_test(NAME java-api COMMAND ${Java_JAVA_EXECUTABLE} -jar ${CMAKE_CURRENT_BINARY_DIR}/java-tests/junit-platform-console-standalone-1.5.2.jar -cp ${CMAKE_CURRENT_BINARY_DIR}/java-tests/protobuf-java-3.10.0.jar -cp ${onnxruntime_test_jar_name} -cp ${onnxruntime_jar_binaries_platform} --scan-class-path --fail-if-no-tests --reports-dir=${CMAKE_CURRENT_BINARY_DIR}/java-tests/results --disable-banner WORKING_DIRECTORY ${REPO_ROOT})
850+
endif()

docs/Java_API.md

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
# ONNX Runtime Java API
2+
The ONNX runtime provides a Java binding for running inference on ONNX models on a JVM, using Java 8 or newer.
3+
4+
Two jar files are created during the build process, one contains the onnxruntime shared library, the JNI binding and the Java class files, and the other only contains the class files. By default the shared libraries are loaded from the classpath in a folder called `/lib`, if you wish to have them load from `java.library.path` then supply `-DORT_LOAD_FROM_LIBRARY_PATH` to the JVM at runtime.
5+
6+
## Sample Code
7+
8+
The unit tests contain several examples of loading models, inspecting input/output node shapes and types, as well as constructing tensors for scoring.
9+
10+
* [../java/src/test/java/ai/onnxruntime/InferenceTest.java#L66](../java/src/test/java/ai/onnxruntime/InferenceTest.java#L66)
11+
12+
## Getting Started
13+
Here is a simple tutorial for getting started with running inference on an existing ONNX model with given input data. The model is typically trained using any of the well-known training frameworks and exported into the ONNX format.
14+
Note the code presented below uses syntax available from Java 10 onwards. The Java 8 syntax is similar but more verbose.
15+
To start a scoring session, first create the `OrtEnvironment`, then open a session using the `OrtSession` class, passing in the file path to the model as a parameter.
16+
17+
var env = OrtEnvironment.getEnvironment();
18+
var session = env.createSession("model.onnx",new OrtSession.SessionOptions());
19+
20+
Once a session is created, you can execute queries using the `run` method of the `OrtSession` object.
21+
At the moment we support `OnnxTensor` inputs, and models can produce `OnnxTensor`, `OnnxSequence` or `OnnxMap` outputs. The latter two are more likely when scoring models produced by frameworks like scikit-learn.
22+
The run call expects a `Map<String,OnnxTensor>` where the keys match input node names stored in the model. These can be viewed by calling `session.getInputNames()` or `session.getInputInfo()` on an instantiated session.
23+
The run call produces a `Result` object, which contains a `Map<String,OnnxValue>` representing the output. The `Result` object is `AutoCloseable` and can be used in a try-with-resources statement to
24+
prevent references from leaking out. Once the `Result` object is closed, all its child `OnnxValue`s are closed too.
25+
26+
OnnxTensor t1,t2;
27+
var inputs = Map.of("name1",t1,"name2",t2);
28+
try (var results = session.run(inputs)) {
29+
// manipulate the results
30+
}
31+
32+
You can load your input data into OnnxTensor objects in several ways. The most efficient way is to use a `java.nio.Buffer`, but it's possible to use multidimensional arrays too. If constructed using arrays the arrays must not be ragged.
33+
34+
FloatBuffer sourceData; // assume your data is loaded into a FloatBuffer
35+
long[] dimensions; // and the dimensions of the input are stored here
36+
var tensorFromBuffer = OnnxTensor.createTensor(env,sourceData,dimensions);
37+
38+
float[][] sourceArray = new float[28][28]; // assume your data is loaded into a float array
39+
var tensorFromArray = OnnxTensor.createTensor(env,sourceArray);
40+
41+
Here is a [complete sample program](../java/sample/ScoreMNIST.java) that runs inference on a pretrained MNIST model.
42+
43+
## Running on a GPU or with another provider (Optional)
44+
To enable other execution providers like GPUs simply turn on the appropriate flag on SessionOptions when creating an OrtSession.
45+
46+
int gpuDeviceId = 0; // The GPU device ID to execute on
47+
var sessionOptions = new OrtSession.SessionOptions();
48+
sessionOptions.addCUDA(gpuDeviceId);
49+
var session = environment.createSession("model.onnx", sessionOptions);
50+
51+
The execution providers are preferred in the order they were enabled.
52+
53+
## API Reference
54+
55+
The Javadoc is available [here](https://microsoft.github.io/onnxruntime/java/index.html).
56+

0 commit comments

Comments
 (0)