diff --git a/modules/java_api/gradle.properties b/modules/java_api/gradle.properties index b5519a5f4..35a26a601 100644 --- a/modules/java_api/gradle.properties +++ b/modules/java_api/gradle.properties @@ -1,3 +1,4 @@ build_benchmark_app=false build_java_samples=false build_kotlin_samples=false +build_hello_query_device=false \ No newline at end of file diff --git a/modules/java_api/samples/README.md b/modules/java_api/samples/README.md index 311239a45..660f63583 100644 --- a/modules/java_api/samples/README.md +++ b/modules/java_api/samples/README.md @@ -1,3 +1,11 @@ +# Samples + +The OpenVINO samples are simple console applications that show how to utilize specific OpenVINO API capabilities within an application. The following samples are available +- [Benchmark Application](#benchmark-application) +- [Face Detection Java samples](#face-detection-java-samples) +- [Face Detection Kotlin sample](./face_detection_kotlin_sample/README.md) +- [Hello Query Device](#hello-query-device-sample) + # Benchmark Application This guide describes how to run the benchmark applications. @@ -153,3 +161,67 @@ The application will show the image with detected objects enclosed in rectangles ### For ```face_detection_sample_async``` The application will show the video with detected objects enclosed in rectangles in new window. + + +# Hello Query Device Sample + +## How It Works + +This sample demonstrates how to show OpenVINO Runtime devices and print their metrics and default configuration values using the Query Device API feature. 
+ +## Build + +Use Gradle to build **openvino-x-x-x.jar** with OpenVINO Java bindings in `java_api/build/libs` and **hello_query_device.jar** in `java_api/samples/hello_query_device/build/libs`: +```bash +cd openvino_contrib/modules/java_api +gradle build -Pbuild_hello_query_device=true +``` + +## Running + +To run the sample use: +```bash +java -cp ".:${OV_JAVA_DIR}/openvino-x-x-x.jar:samples/hello_query_device/build/libs/hello_query_device.jar" Main +``` + +## Running in Idea IntelliJ +- Import the project in IntelliJ IDEA. See [here](../README.md#import-to-intellij-idea) for instructions. +- In **Run/Debug Configurations** dropdown, click on **Edit Configurations**. +- Click on **Add New Configuration** and select **Gradle** from the dropdown menu. +- Give the configuration an appropriate name: "HelloQueryDeviceSample", and enter the following command in the **Tasks and arguments** input box. + ```bash + :samples:hello_query_device:run -Pbuild_hello_query_device=true + ``` +- Under **Environment Variables**, select **Edit environment variables** and add the following environment variables: + + `INTEL_OPENVINO_DIR=` +- Click on **OK** to save the configuration. +- Select the saved configuration from the **Run/Debug Configurations**. Click on the **Run** button to run or the **Debug** button to run in debug mode. 
+ +## Sample Output + +Below is a sample output for CPU device: + +``` +[INFO] Available devices: +[INFO] CPU: +[INFO] SUPPORTED_PROPERTIES: +[INFO] AVAILABLE_DEVICES: +[INFO] RANGE_FOR_ASYNC_INFER_REQUESTS: 1 1 1 +[INFO] RANGE_FOR_STREAMS: 1 20 +[INFO] FULL_DEVICE_NAME: 12th Gen Intel(R) Core(TM) i7-12700H +[INFO] OPTIMIZATION_CAPABILITIES: FP32 FP16 INT8 BIN EXPORT_IMPORT +[INFO] CACHING_PROPERTIES: FULL_DEVICE_NAME +[INFO] NUM_STREAMS: 1 +[INFO] AFFINITY: HYBRID_AWARE +[INFO] INFERENCE_NUM_THREADS: 0 +[INFO] PERF_COUNT: NO +[INFO] INFERENCE_PRECISION_HINT: f32 +[INFO] PERFORMANCE_HINT: LATENCY +[INFO] EXECUTION_MODE_HINT: PERFORMANCE +[INFO] PERFORMANCE_HINT_NUM_REQUESTS: 0 +[INFO] ENABLE_CPU_PINNING: YES +[INFO] SCHEDULING_CORE_TYPE: ANY_CORE +[INFO] ENABLE_HYPER_THREADING: YES +[INFO] DEVICE_ID: +``` \ No newline at end of file diff --git a/modules/java_api/samples/hello_query_device/build.gradle b/modules/java_api/samples/hello_query_device/build.gradle new file mode 100644 index 000000000..2bfdd3b61 --- /dev/null +++ b/modules/java_api/samples/hello_query_device/build.gradle @@ -0,0 +1,17 @@ +plugins { + id 'java' + id 'application' +} + +sourceSets { + main { + java { + srcDirs = ["src/main/java"] + } + } +} +mainClassName = 'Main' + +dependencies { + implementation rootProject +} diff --git a/modules/java_api/samples/hello_query_device/settings.gradle b/modules/java_api/samples/hello_query_device/settings.gradle new file mode 100644 index 000000000..d4217b395 --- /dev/null +++ b/modules/java_api/samples/hello_query_device/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'hello_query_device' diff --git a/modules/java_api/samples/hello_query_device/src/main/java/Main.java b/modules/java_api/samples/hello_query_device/src/main/java/Main.java new file mode 100644 index 000000000..504fb1e35 --- /dev/null +++ b/modules/java_api/samples/hello_query_device/src/main/java/Main.java @@ -0,0 +1,64 @@ +import org.intel.openvino.Core; + +import java.io.IOException; 
+import java.util.Arrays; +import java.util.List; +import java.util.logging.ConsoleHandler; +import java.util.logging.LogRecord; +import java.util.logging.Logger; +import java.util.logging.SimpleFormatter; + +/* +This sample queries all available OpenVINO Runtime Devices and prints +their supported metrics and plugin configuration parameters using Query +Device API feature. The application prints all available devices with +their supported metrics and default values for configuration parameters. + +The sample takes no command-line parameters. +*/ +public class Main { + + private static Logger logger; + + static { + logger = Logger.getLogger(Main.class.getName()); + logger.setUseParentHandlers(false); + ConsoleHandler handler = new ConsoleHandler(); + handler.setFormatter( + new SimpleFormatter() { + private static final String format = "[%1$s] %2$s%n"; + + @Override + public synchronized String format(LogRecord lr) { + return String.format( + format, lr.getLevel().getLocalizedName(), lr.getMessage()); + } + }); + logger.addHandler(handler); + } + + public static void main(String[] args) throws IOException { + List excludedProperties = + Arrays.asList("SUPPORTED_METRICS", "SUPPORTED_CONFIG_KEYS", "SUPPORTED_PROPERTIES"); + + Core core = new Core(); + + logger.info("Available devices:"); + for (String device : core.get_available_devices()) { + logger.info(String.format("%s:", device)); + logger.info("\tSUPPORTED_PROPERTIES:"); + + for (String propertyKey : core.get_property(device, "SUPPORTED_PROPERTIES").asList()) { + if (!excludedProperties.contains(propertyKey)) { + String propertyVal; + try { + propertyVal = core.get_property(device, propertyKey).asString(); + } catch (Exception e) { + propertyVal = "UNSUPPORTED TYPE"; + } + logger.info(String.format("\t\t%s: %s", propertyKey, propertyVal)); + } + } + } + } +} diff --git a/modules/java_api/settings.gradle b/modules/java_api/settings.gradle index 1b6caa8c7..b7c378eed 100644 --- a/modules/java_api/settings.gradle 
+++ b/modules/java_api/settings.gradle @@ -9,3 +9,6 @@ if (build_java_samples.toBoolean()) { if (build_kotlin_samples.toBoolean()) { include 'samples:face_detection_kotlin_sample' } +if (build_hello_query_device.toBoolean()) { + include 'samples:hello_query_device' +} \ No newline at end of file diff --git a/modules/java_api/src/main/cpp/any.cpp b/modules/java_api/src/main/cpp/any.cpp index 3ea40f51d..7d27df4cd 100644 --- a/modules/java_api/src/main/cpp/any.cpp +++ b/modules/java_api/src/main/cpp/any.cpp @@ -17,6 +17,38 @@ JNIEXPORT jint JNICALL Java_org_intel_openvino_Any_asInt(JNIEnv *env, jobject ob return 0; } +JNIEXPORT jobject JNICALL Java_org_intel_openvino_Any_asList(JNIEnv *env, jobject obj, jlong addr) { + JNI_METHOD("asList", + Any *obj = (Any *)addr; + + if (obj->is>()) { + jclass arrayClass = env->FindClass("java/util/ArrayList"); + jmethodID arrayInit = env->GetMethodID(arrayClass, "", "()V"); + jobject arrayObj = env->NewObject(arrayClass, arrayInit); + jmethodID arrayAdd = env->GetMethodID(arrayClass, "add", "(Ljava/lang/Object;)Z"); + + for (const auto& it : obj->as>()) { + std::string property_name = it; + jstring string = env->NewStringUTF(property_name.c_str()); + env->CallObjectMethod(arrayObj, arrayAdd, string); + } + + return arrayObj; + } + return vectorToJavaList(env, obj->as>()); + ) + return 0; +} + +JNIEXPORT jstring JNICALL Java_org_intel_openvino_Any_asString(JNIEnv *env, jobject obj, jlong addr) { + JNI_METHOD("asString", + Any *obj = (Any *)addr; + std::string n_string = obj->as(); + return env->NewStringUTF(n_string.c_str()); + ) + return 0; +} + JNIEXPORT void JNICALL Java_org_intel_openvino_Any_delete(JNIEnv *, jobject, jlong addr) { Any *obj = (Any *)addr; diff --git a/modules/java_api/src/main/cpp/compiled_model.cpp b/modules/java_api/src/main/cpp/compiled_model.cpp index 57bd333e6..d0e060a68 100644 --- a/modules/java_api/src/main/cpp/compiled_model.cpp +++ b/modules/java_api/src/main/cpp/compiled_model.cpp @@ -22,6 +22,60 @@ 
JNIEXPORT jlong JNICALL Java_org_intel_openvino_CompiledModel_CreateInferRequest return 0; } +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CompiledModel_GetInputs(JNIEnv * env, jobject obj, jlong modelAddr) { + JNI_METHOD("GetInputs", + CompiledModel *compiled_model = (CompiledModel *) modelAddr; + const std::vector>& inputs_vec = compiled_model->inputs(); + + jclass arrayClass = env->FindClass("java/util/ArrayList"); + jmethodID arrayInit = env->GetMethodID(arrayClass, "", "()V"); + jobject arrayObj = env->NewObject(arrayClass, arrayInit); + jmethodID arrayAdd = env->GetMethodID(arrayClass, "add", "(Ljava/lang/Object;)Z"); + + jclass outputClass = env->FindClass("org/intel/openvino/Output"); + jmethodID outputConstructor = env->GetMethodID(outputClass,"","(J)V"); + + Output *input; + for (const auto &item : inputs_vec) { + input = new Output; + *input = item; + + jobject inputObj = env->NewObject(outputClass, outputConstructor, (jlong)(input)); + env->CallObjectMethod(arrayObj, arrayAdd, inputObj); + } + + return arrayObj; + ) + return 0; +} + +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CompiledModel_GetOutputs(JNIEnv * env, jobject obj, jlong modelAddr) { + JNI_METHOD("GetOutputs", + CompiledModel *compiled_model = (CompiledModel *) modelAddr; + const std::vector>& outputs_vec = compiled_model->outputs(); + + jclass arrayClass = env->FindClass("java/util/ArrayList"); + jmethodID arrayInit = env->GetMethodID(arrayClass, "", "()V"); + jobject arrayObj = env->NewObject(arrayClass, arrayInit); + jmethodID arrayAdd = env->GetMethodID(arrayClass, "add", "(Ljava/lang/Object;)Z"); + + jclass outputClass = env->FindClass("org/intel/openvino/Output"); + jmethodID outputConstructor = env->GetMethodID(outputClass,"","(J)V"); + + Output *output; + for (const auto &item : outputs_vec) { + output = new Output; + *output = item; + + jobject outputObj = env->NewObject(outputClass, outputConstructor, (jlong)(output)); + env->CallObjectMethod(arrayObj, arrayAdd, 
outputObj); + } + + return arrayObj; + ) + return 0; +} + JNIEXPORT void JNICALL Java_org_intel_openvino_CompiledModel_delete(JNIEnv *, jobject, jlong addr) { CompiledModel *compiled_model = (CompiledModel *)addr; diff --git a/modules/java_api/src/main/cpp/core.cpp b/modules/java_api/src/main/cpp/core.cpp index 6301a19e0..526a506ab 100644 --- a/modules/java_api/src/main/cpp/core.cpp +++ b/modules/java_api/src/main/cpp/core.cpp @@ -76,6 +76,74 @@ JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel(JNIEnv *env, j return 0; } +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel1(JNIEnv *env, jobject obj, jlong coreAddr, jstring path) +{ + JNI_METHOD("CompileModel1", + std::string n_path = jstringToString(env, path); + + Core *core = (Core *)coreAddr; + CompiledModel *compiled_model = new CompiledModel(); + *compiled_model = core->compile_model(n_path); + + return (jlong)compiled_model; + ) + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel2(JNIEnv *env, jobject obj, jlong coreAddr, jstring path, jstring device) +{ + JNI_METHOD("CompileModel2", + std::string n_device = jstringToString(env, device); + std::string n_path = jstringToString(env, path); + + Core *core = (Core *)coreAddr; + CompiledModel *compiled_model = new CompiledModel(); + *compiled_model = core->compile_model(n_path, n_device); + + return (jlong)compiled_model; + ) + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel3(JNIEnv *env, jobject obj, jlong coreAddr, jstring path, jstring device, jobject props) +{ + JNI_METHOD("CompileModel3", + std::string n_device = jstringToString(env, device); + std::string n_path = jstringToString(env, path); + AnyMap map; + for (const auto& it : javaMapToMap(env, props)) { + map[it.first] = it.second; + } + + Core *core = (Core *)coreAddr; + // AnyMap will be copied inside compile_model, so we don't have to track the lifetime of this object + CompiledModel *compiled_model = new 
CompiledModel(); + *compiled_model = core->compile_model(n_path, n_device, map); + + return (jlong)compiled_model; + ) + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel4(JNIEnv *env, jobject obj, jlong coreAddr, jlong modelAddr, jstring device, jobject props) +{ + JNI_METHOD("CompileModel4", + std::string n_device = jstringToString(env, device); + std::shared_ptr *model = reinterpret_cast *>(modelAddr); + AnyMap map; + for (const auto& it : javaMapToMap(env, props)) { + map[it.first] = it.second; + } + + Core *core = (Core *)coreAddr; + CompiledModel *compiled_model = new CompiledModel(); + *compiled_model = core->compile_model(*model, n_device, map); + + return (jlong)compiled_model; + ) + return 0; +} + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_GetProperty(JNIEnv *env, jobject obj, jlong coreAddr, jstring device, jstring name) { JNI_METHOD("GetProperty", @@ -104,6 +172,26 @@ JNIEXPORT void JNICALL Java_org_intel_openvino_Core_SetProperty(JNIEnv *env, job ) } +JNIEXPORT jobject JNICALL Java_org_intel_openvino_Core_GetAvailableDevices(JNIEnv *env, jobject obj, jlong coreAddr) { + JNI_METHOD("GetAvailableDevices", + Core *core = (Core *)coreAddr; + const std::vector& devices_vec = core->get_available_devices(); + + jclass arrayClass = env->FindClass("java/util/ArrayList"); + jmethodID arrayInit = env->GetMethodID(arrayClass, "", "()V"); + jobject arrayObj = env->NewObject(arrayClass, arrayInit); + jmethodID arrayAdd = env->GetMethodID(arrayClass, "add", "(Ljava/lang/Object;)Z"); + + for (const std::string &item : devices_vec) { + jstring device = env->NewStringUTF(item.c_str()); + env->CallObjectMethod(arrayObj, arrayAdd, device); + } + + return arrayObj; + ) + return 0; +} + JNIEXPORT void JNICALL Java_org_intel_openvino_Core_delete(JNIEnv *, jobject, jlong addr) { Core *core = (Core *)addr; diff --git a/modules/java_api/src/main/cpp/infer_request.cpp b/modules/java_api/src/main/cpp/infer_request.cpp index 
8e217b277..e24eff15d 100644 --- a/modules/java_api/src/main/cpp/infer_request.cpp +++ b/modules/java_api/src/main/cpp/infer_request.cpp @@ -67,7 +67,7 @@ JNIEXPORT jlong JNICALL Java_org_intel_openvino_InferRequest_GetTensor(JNIEnv *e InferRequest *infer_request = (InferRequest *)addr; Tensor *output_tensor = new Tensor(); - std::string c_tensorName= jstringToString(env, tensorName); + std::string c_tensorName = jstringToString(env, tensorName); *output_tensor = infer_request->get_tensor(c_tensorName); return (jlong)output_tensor; @@ -75,6 +75,17 @@ JNIEXPORT jlong JNICALL Java_org_intel_openvino_InferRequest_GetTensor(JNIEnv *e return 0; } +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_SetTensor(JNIEnv *env, jobject obj, jlong addr, jstring tensorName, jlong tensorAddr) +{ + JNI_METHOD("SetTensor", + InferRequest *infer_request = (InferRequest *)addr; + + std::string c_tensorName = jstringToString(env, tensorName); + const Tensor *tensor = (Tensor *)tensorAddr; + infer_request->set_tensor(c_tensorName, *tensor); + ) +} + JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_delete(JNIEnv *env, jobject obj, jlong addr) { jclass cls = env->GetObjectClass(obj); diff --git a/modules/java_api/src/main/cpp/jni_common.hpp b/modules/java_api/src/main/cpp/jni_common.hpp index 47b14db62..10c08e8a2 100644 --- a/modules/java_api/src/main/cpp/jni_common.hpp +++ b/modules/java_api/src/main/cpp/jni_common.hpp @@ -202,3 +202,32 @@ static std::vector jintArrayToVector(JNIEnv *env, jintArray dims) return std::vector(); } + +static jobject vectorToJavaList(JNIEnv *env, std::vector items) +{ + static const char method_name[] = "vectorToJavaList"; + try + { + jclass arrayClass = env->FindClass("java/util/ArrayList"); + jmethodID arrayInit = env->GetMethodID(arrayClass, "", "()V"); + jobject arrayObj = env->NewObject(arrayClass, arrayInit); + jmethodID arrayAdd = env->GetMethodID(arrayClass, "add", "(Ljava/lang/Object;)Z"); + + for (const auto& item : items) { + 
jstring string = env->NewStringUTF(item.c_str()); + env->CallObjectMethod(arrayObj, arrayAdd, string); + } + + return arrayObj; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + + return nullptr; +} diff --git a/modules/java_api/src/main/cpp/openvino.cpp b/modules/java_api/src/main/cpp/openvino.cpp new file mode 100644 index 000000000..8f55853fc --- /dev/null +++ b/modules/java_api/src/main/cpp/openvino.cpp @@ -0,0 +1,22 @@ +// Copyright (C) 2020-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +#include // JNI header provided by JDK +#include "openvino/openvino.hpp" +#include "openvino/core/graph_util.hpp" + +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace ov; + +JNIEXPORT void JNICALL Java_org_intel_openvino_Openvino_serialize(JNIEnv *env, jobject obj, jlong modelAddr, jstring xmlPath, jstring binPath) +{ + JNI_METHOD("serialize", + std::string xml_path = jstringToString(env, xmlPath); + std::string bin_path = jstringToString(env, binPath); + std::shared_ptr *model = reinterpret_cast *>(modelAddr); + + serialize(*model, xml_path, bin_path); + ) +} diff --git a/modules/java_api/src/main/cpp/openvino_java.hpp b/modules/java_api/src/main/cpp/openvino_java.hpp index 9ad6ac173..9708bfb8c 100644 --- a/modules/java_api/src/main/cpp/openvino_java.hpp +++ b/modules/java_api/src/main/cpp/openvino_java.hpp @@ -127,18 +127,28 @@ extern "C" /* -------------------------------------- API 2.0 ------------------------------------------*/ + //ov + JNIEXPORT void JNICALL Java_org_intel_openvino_Openvino_serialize(JNIEnv *, jobject, jlong, jstring, jstring); + // ov::Core JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_GetCore(JNIEnv *, jobject); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_GetCore1(JNIEnv *, jobject, jstring); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_ReadModel(JNIEnv *, jobject, jlong, 
jstring); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_ReadModel1(JNIEnv *, jobject, jlong, jstring, jstring); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel(JNIEnv *, jobject, jlong, jlong, jstring); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel1(JNIEnv *, jobject, jlong, jstring); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel2(JNIEnv *, jobject, jlong, jstring, jstring); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel3(JNIEnv *, jobject, jlong, jstring, jstring, jobject); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_CompileModel4(JNIEnv *, jobject, jlong, jlong, jstring, jobject); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Core_GetProperty(JNIEnv *, jobject, jlong, jstring, jstring); JNIEXPORT void JNICALL Java_org_intel_openvino_Core_SetProperty(JNIEnv *, jobject, jlong, jstring, jobject); + JNIEXPORT jobject JNICALL Java_org_intel_openvino_Core_GetAvailableDevices(JNIEnv *, jobject, jlong); JNIEXPORT void JNICALL Java_org_intel_openvino_Core_delete(JNIEnv *, jobject, jlong); // ov::Any JNIEXPORT jint JNICALL Java_org_intel_openvino_Any_asInt(JNIEnv *, jobject, jlong); + JNIEXPORT jobject JNICALL Java_org_intel_openvino_Any_asList(JNIEnv *, jobject, jlong); + JNIEXPORT jstring JNICALL Java_org_intel_openvino_Any_asString(JNIEnv *, jobject, jlong); // ov::Model JNIEXPORT jstring JNICALL Java_org_intel_openvino_Model_getName(JNIEnv *, jobject, jlong); @@ -152,6 +162,8 @@ extern "C" // ov::CompiledModel JNIEXPORT jlong JNICALL Java_org_intel_openvino_CompiledModel_CreateInferRequest(JNIEnv *, jobject, jlong); JNIEXPORT void JNICALL Java_org_intel_openvino_CompiledModel_delete(JNIEnv *, jobject, jlong); + JNIEXPORT jobject JNICALL Java_org_intel_openvino_CompiledModel_GetInputs(JNIEnv *, jobject, jlong); + JNIEXPORT jobject JNICALL Java_org_intel_openvino_CompiledModel_GetOutputs(JNIEnv *, jobject, jlong); // ov::InferRequest JNIEXPORT void JNICALL 
Java_org_intel_openvino_InferRequest_Infer(JNIEnv *, jobject, jlong); @@ -161,14 +173,18 @@ extern "C" JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_SetOutputTensor(JNIEnv *, jobject, jlong, jlong); JNIEXPORT jlong JNICALL Java_org_intel_openvino_InferRequest_GetOutputTensor(JNIEnv *, jobject, jlong); JNIEXPORT jlong JNICALL Java_org_intel_openvino_InferRequest_GetTensor(JNIEnv *, jobject, jlong, jstring); + JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_SetTensor(JNIEnv *, jobject, jlong, jstring, jlong); JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_delete(JNIEnv *, jobject, jlong); // ov::Tensor JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorCArray(JNIEnv *, jobject, jint, jintArray, jlong); JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorFloat(JNIEnv *, jobject, jintArray, jfloatArray); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorInt(JNIEnv *, jobject, jintArray, jintArray); + JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorLong(JNIEnv *, jobject, jintArray, jlongArray); JNIEXPORT jint JNICALL Java_org_intel_openvino_Tensor_GetSize(JNIEnv *, jobject, jlong); JNIEXPORT jintArray JNICALL Java_org_intel_openvino_Tensor_GetShape(JNIEnv *, jobject, jlong); JNIEXPORT jfloatArray JNICALL Java_org_intel_openvino_Tensor_asFloat(JNIEnv *, jobject, jlong); + JNIEXPORT jintArray JNICALL Java_org_intel_openvino_Tensor_asInt(JNIEnv *, jobject, jlong); JNIEXPORT void JNICALL Java_org_intel_openvino_Tensor_delete(JNIEnv *, jobject, jlong); // ov::PrePostProcessor @@ -216,6 +232,7 @@ extern "C" // ov::Output JNIEXPORT jstring JNICALL Java_org_intel_openvino_Output_GetAnyName(JNIEnv *, jobject, jlong); JNIEXPORT jintArray JNICALL Java_org_intel_openvino_Output_GetShape(JNIEnv *, jobject, jlong); + JNIEXPORT int JNICALL Java_org_intel_openvino_Output_GetElementType(JNIEnv *, jobject, jlong); JNIEXPORT void JNICALL Java_org_intel_openvino_Output_delete(JNIEnv *, jobject, jlong); 
#ifdef __cplusplus diff --git a/modules/java_api/src/main/cpp/output.cpp b/modules/java_api/src/main/cpp/output.cpp index 4a5424f32..02f32cd4d 100644 --- a/modules/java_api/src/main/cpp/output.cpp +++ b/modules/java_api/src/main/cpp/output.cpp @@ -36,6 +36,17 @@ JNIEXPORT jintArray JNICALL Java_org_intel_openvino_Output_GetShape(JNIEnv *env, return 0; } +JNIEXPORT int JNICALL Java_org_intel_openvino_Output_GetElementType(JNIEnv *env, jobject obj, jlong addr) { + JNI_METHOD("GetElementType", + Output *output = (Output *)addr; + + element::Type_t t_type = output->get_element_type(); + jint type = static_cast(t_type); + return type; + ) + return 0; +} + JNIEXPORT void JNICALL Java_org_intel_openvino_Output_delete(JNIEnv *, jobject, jlong addr) { Output *obj = (Output *)addr; diff --git a/modules/java_api/src/main/cpp/tensor.cpp b/modules/java_api/src/main/cpp/tensor.cpp index 5cb85c35a..61f1dde60 100644 --- a/modules/java_api/src/main/cpp/tensor.cpp +++ b/modules/java_api/src/main/cpp/tensor.cpp @@ -55,6 +55,34 @@ JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorFloat(JNIEnv *env, return 0; } +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorInt(JNIEnv *env, jobject, jintArray shape, jintArray data) +{ + JNI_METHOD( + "TensorInt", + Shape input_shape = jintArrayToVector(env, shape); + Tensor *ov_tensor = new Tensor(element::i32, input_shape); + + env->GetIntArrayRegion(data, 0, ov_tensor->get_size(), (jint*)ov_tensor->data()); + + return (jlong)ov_tensor; + ); + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Tensor_TensorLong(JNIEnv *env, jobject, jintArray shape, jlongArray data) +{ + JNI_METHOD( + "TensorLong", + Shape input_shape = jintArrayToVector(env, shape); + Tensor *ov_tensor = new Tensor(element::i64, input_shape); + + env->GetLongArrayRegion(data, 0, ov_tensor->get_size(), (jlong*)ov_tensor->data()); + + return (jlong)ov_tensor; + ); + return 0; +} + JNIEXPORT jint JNICALL 
Java_org_intel_openvino_Tensor_GetSize(JNIEnv *env, jobject, jlong addr) { JNI_METHOD( @@ -109,6 +137,29 @@ JNIEXPORT jfloatArray JNICALL Java_org_intel_openvino_Tensor_asFloat(JNIEnv *env return 0; } +JNIEXPORT jintArray JNICALL Java_org_intel_openvino_Tensor_asInt(JNIEnv *env, jobject, jlong addr) +{ + JNI_METHOD( + "asInt", + Tensor *ov_tensor = (Tensor *)addr; + + size_t size = ov_tensor->get_size(); + const int *data = ov_tensor->data(); + + jintArray result = env->NewIntArray(size); + if (!result) { + throw std::runtime_error("Out of memory!"); + } jint *arr = env->GetIntArrayElements(result, nullptr); + + for (size_t i = 0; i < size; ++i) + arr[i] = data[i]; + + env->ReleaseIntArrayElements(result, arr, 0); + return result; + ) + return 0; +} + JNIEXPORT void JNICALL Java_org_intel_openvino_Tensor_delete(JNIEnv *, jobject, jlong addr) { Tensor *tensor = (Tensor *)addr; diff --git a/modules/java_api/src/main/java/org/intel/openvino/Any.java b/modules/java_api/src/main/java/org/intel/openvino/Any.java index 1fe772313..39c0ea178 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/Any.java +++ b/modules/java_api/src/main/java/org/intel/openvino/Any.java @@ -3,6 +3,8 @@ package org.intel.openvino; +import java.util.List; + /** This class represents an object to work with different types. 
*/ public class Any extends Wrapper { @@ -14,9 +16,21 @@ public int asInt() { return asInt(nativeObj); } + public String asString() { + return asString(nativeObj); + } + + public List asList() { + return asList(nativeObj); + } + /*----------------------------------- native methods -----------------------------------*/ private static native int asInt(long addr); + private static native String asString(long addr); + + private static native List asList(long addr); + @Override protected native void delete(long nativeObj); } diff --git a/modules/java_api/src/main/java/org/intel/openvino/CompiledModel.java b/modules/java_api/src/main/java/org/intel/openvino/CompiledModel.java index 370f52b54..c9b32eadd 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/CompiledModel.java +++ b/modules/java_api/src/main/java/org/intel/openvino/CompiledModel.java @@ -3,6 +3,8 @@ package org.intel.openvino; +import java.util.List; + /** * This class represents a compiled model. * @@ -25,9 +27,33 @@ public InferRequest create_infer_request() { return new InferRequest(CreateInferRequest(nativeObj)); } + /** + * Gets all inputs of a compiled model. They contain information about input tensors such as + * tensor shape, names, and element type. + * + * @return List of model inputs. + */ + public List inputs() { + return GetInputs(nativeObj); + } + + /** + * Gets all outputs of a compiled model. They contain information about output tensors such as + * tensor shape, name, and element type. + * + * @return List of model outputs. 
+ */ + public List outputs() { + return GetOutputs(nativeObj); + } + /*----------------------------------- native methods -----------------------------------*/ private static native long CreateInferRequest(long addr); + private static native List GetInputs(long addr); + + private static native List GetOutputs(long addr); + @Override protected native void delete(long nativeObj); } diff --git a/modules/java_api/src/main/java/org/intel/openvino/Core.java b/modules/java_api/src/main/java/org/intel/openvino/Core.java index c53f384f7..918bea677 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/Core.java +++ b/modules/java_api/src/main/java/org/intel/openvino/Core.java @@ -3,6 +3,7 @@ package org.intel.openvino; +import java.util.List; import java.util.Map; import java.util.logging.Logger; @@ -55,6 +56,51 @@ public Model read_model(final String modelPath, final String weightPath) { return new Model(ReadModel1(nativeObj, modelPath, weightPath)); } + /** + * Reads and loads a compiled model from the IR/ONNX/PDPD file to the default OpenVINO device + * selected by the AUTO plugin. + * + *

This can be more efficient, especially when caching is enabled and a cached model is + * available, than using the read_model followed by compile_model flow. + * + * @param path Path to a model. + * @return A compiled model. + */ + public CompiledModel compile_model(final String path) { + return new CompiledModel(CompileModel1(nativeObj, path)); + } + + /** + * Reads and loads a compiled model from the IR/ONNX/PDPD file. + * + *

This can be more efficient, especially when caching is enabled and a cached model is + * available, than using the read_model followed by compile_model flow. + * + * @param path Path to a model. + * @param device Name of a device to load a model to. + * @return A compiled model. + */ + public CompiledModel compile_model(final String path, final String device) { + return new CompiledModel(CompileModel2(nativeObj, path, device)); + } + + /** + * Reads and loads a compiled model from the IR/ONNX/PDPD file. + * + *

This can be more efficient, especially when caching is enabled and a cached model is + * available, than using the read_model followed by compile_model flow. + * + * @param path Path to a model. + * @param device Name of a device to load a model to. + * @param properties Map of pairs: (property name, property value) relevant only for this load + * operation. + * @return A compiled model. + */ + public CompiledModel compile_model( + final String path, final String device, final Map properties) { + return new CompiledModel(CompileModel3(nativeObj, path, device, properties)); + } + /** * Creates a compiled model from a source model object. * @@ -69,6 +115,24 @@ public CompiledModel compile_model(Model model, final String device) { return new CompiledModel(CompileModel(nativeObj, model.getNativeObjAddr(), device)); } + /** + * Creates a compiled model from a source model object. + * + *

Users can create as many compiled models as they need and use them simultaneously (up to + * the limitation of the hardware resources). + * + * @param model Model object acquired from {@link Core#read_model}. + * @param device Name of a device to load a model to. + * @param properties properties – Map of pairs: (property name, property value) relevant only + * for this load operation. + * @return A compiled model. + */ + public CompiledModel compile_model( + Model model, final String device, final Map properties) { + return new CompiledModel( + CompileModel4(nativeObj, model.getNativeObjAddr(), device, properties)); + } + /** * Gets properties related to device behaviour. * @@ -93,6 +157,19 @@ public void set_property(final String device, final Map prop) { SetProperty(nativeObj, device, prop); } + /** + * Returns the devices available for inference. + * + * @return List of devices. + *

The devices are returned as { CPU, GPU.0, GPU.1, GNA }. If there is more than one + * device of a specific type, they are enumerated with the .# suffix. Such enumerated device + * can later be used as a device name in all Core methods like {@link Core#compile_model}, + * {@link Core#set_property} and so on. + */ + public List get_available_devices() { + return GetAvailableDevices(nativeObj); + } + /*----------------------------------- native methods -----------------------------------*/ private static native long GetCore(); @@ -106,11 +183,27 @@ private static native long ReadModel1( private static native long CompileModel(long core, long net, final String device); + private static native long CompileModel1(long core, final String device); + + private static native long CompileModel2( + long core, final String modelPath, final String device); + + private static native long CompileModel4( + long core, long net, final String device, final Map properties); + + private static native long CompileModel3( + long core, + final String modelPath, + final String device, + final Map props); + private static native long GetProperty(long core, final String device, final String name); private static native void SetProperty( long core, final String device, final Map prop); + private static native List GetAvailableDevices(long core); + @Override protected native void delete(long nativeObj); } diff --git a/modules/java_api/src/main/java/org/intel/openvino/InferRequest.java b/modules/java_api/src/main/java/org/intel/openvino/InferRequest.java index 6944520df..5266bdd2a 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/InferRequest.java +++ b/modules/java_api/src/main/java/org/intel/openvino/InferRequest.java @@ -80,9 +80,19 @@ public Tensor get_tensor(String tensorName) { } /** - * Detele the native object to release resources. + * Sets an input/output tensor to infer on. * - *
<p>
This mehtod is protected from double deallocation + * @param tensorName Name of the tensor. + * @param tensor The tensor to set. + */ + public void set_tensor(String tensorName, Tensor tensor) { + SetTensor(nativeObj, tensorName, tensor.nativeObj); + } + + /** + * Delete the native object to release resources. + * + *
<p>
This method is protected from double deallocation */ public void release() { delete(nativeObj); @@ -104,6 +114,8 @@ public void release() { private static native long GetTensor(long addr, String tensorName); + private static native void SetTensor(long addr, String tensorName, long tensor); + @Override protected native void delete(long nativeObj); } diff --git a/modules/java_api/src/main/java/org/intel/openvino/Openvino.java b/modules/java_api/src/main/java/org/intel/openvino/Openvino.java new file mode 100644 index 000000000..bebcc7660 --- /dev/null +++ b/modules/java_api/src/main/java/org/intel/openvino/Openvino.java @@ -0,0 +1,31 @@ +// Copyright (C) 2020-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +package org.intel.openvino; + +/** OpenVINO Runtime utility methods. */ +public class Openvino extends Wrapper { + + protected Openvino(long addr) { + super(addr); + } + + /** + * Serialize given model into IR. + * + *
<p>
The generated .xml and .bin files will be saved into provided paths.This method serializes + * model "as-is" that means no weights compression is applied. + * + * @param model Model which will be converted to IR representation. + * @param xmlPath Path where .xml file will be saved. + * @param binPath Path where .bin file will be saved. + */ + public static void serialize(Model model, final String xmlPath, final String binPath) { + serialize(model.nativeObj, xmlPath, binPath); + } + + /*----------------------------------- native methods -----------------------------------*/ + + private static native void serialize( + long modelAddr, final String xmlPath, final String binPath); +} diff --git a/modules/java_api/src/main/java/org/intel/openvino/Output.java b/modules/java_api/src/main/java/org/intel/openvino/Output.java index 0b11854b8..8b77f3a80 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/Output.java +++ b/modules/java_api/src/main/java/org/intel/openvino/Output.java @@ -20,11 +20,18 @@ public int[] get_shape() { return GetShape(nativeObj); } + /** Returns the element type of the output referred to by this output handle. 
*/ + public ElementType get_element_type() { + return ElementType.valueOf(GetElementType(nativeObj)); + } + /*----------------------------------- native methods -----------------------------------*/ private static native String GetAnyName(long addr); private static native int[] GetShape(long addr); + private static native int GetElementType(long addr); + @Override protected native void delete(long nativeObj); } diff --git a/modules/java_api/src/main/java/org/intel/openvino/Tensor.java b/modules/java_api/src/main/java/org/intel/openvino/Tensor.java index 97a85b9b7..b3236e26a 100644 --- a/modules/java_api/src/main/java/org/intel/openvino/Tensor.java +++ b/modules/java_api/src/main/java/org/intel/openvino/Tensor.java @@ -22,6 +22,26 @@ public Tensor(int[] dims, float[] data) { super(TensorFloat(dims, data)); } + /** + * Constructs an Integer {@link Tensor} from the given int array. + * + * @param dims shape of the tensor + * @param data an integer array containing the tensor data + */ + public Tensor(int[] dims, int[] data) { + super(TensorInt(dims, data)); + } + + /** + * Constructs a Long {@link Tensor} from the given long array. + * + * @param dims shape of the tensor + * @param data a long array containing the tensor data + */ + public Tensor(int[] dims, long[] data) { + super(TensorLong(dims, data)); + } + /** * Returns the total number of elements (a product of all the dims or 1 for scalar) * @@ -41,15 +61,26 @@ public float[] data() { return asFloat(nativeObj); } + /** Returns the tensor data as an integer array. 
*/ + public int[] as_int() { + return asInt(nativeObj); + } + /*----------------------------------- native methods -----------------------------------*/ private static native long TensorCArray(int type, int[] shape, long cArray); private static native long TensorFloat(int[] shape, float[] data); + private static native long TensorInt(int[] shape, int[] data); + + private static native long TensorLong(int[] shape, long[] data); + private static native int[] GetShape(long addr); private static native float[] asFloat(long addr); + private static native int[] asInt(long addr); + private static native int GetSize(long addr); @Override diff --git a/modules/java_api/src/test/java/org/intel/openvino/CompiledModelTests.java b/modules/java_api/src/test/java/org/intel/openvino/CompiledModelTests.java new file mode 100644 index 000000000..ce978ac4d --- /dev/null +++ b/modules/java_api/src/test/java/org/intel/openvino/CompiledModelTests.java @@ -0,0 +1,42 @@ +package org.intel.openvino; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +import org.junit.Before; +import org.junit.Test; + +import java.util.List; + +public class CompiledModelTests extends OVTest { + + private CompiledModel model; + + @Before + public void init() { + Core core = new Core(); + model = core.compile_model(modelXml, device); + } + + @Test + public void testInputs() { + List inputs = model.inputs(); + + assertEquals("data", inputs.get(0).get_any_name()); + assertEquals(ElementType.f32, inputs.get(0).get_element_type()); + + int[] shape = new int[] {1, 3, 32, 32}; + assertArrayEquals("Shape", shape, inputs.get(0).get_shape()); + } + + @Test + public void testOutputs() { + List outputs = model.outputs(); + + assertEquals("fc_out", outputs.get(0).get_any_name()); + assertEquals(ElementType.f32, outputs.get(0).get_element_type()); + + int[] shape = new int[] {1, 10}; + assertArrayEquals("Shape", shape, outputs.get(0).get_shape()); + } +} diff --git 
a/modules/java_api/src/test/java/org/intel/openvino/CoreTests.java b/modules/java_api/src/test/java/org/intel/openvino/CoreTests.java index e26f87211..63151b85e 100644 --- a/modules/java_api/src/test/java/org/intel/openvino/CoreTests.java +++ b/modules/java_api/src/test/java/org/intel/openvino/CoreTests.java @@ -5,6 +5,7 @@ import org.junit.Test; import java.util.HashMap; +import java.util.List; import java.util.Map; public class CoreTests extends OVTest { @@ -16,6 +17,26 @@ public void testReadModel() { assertTrue(!net.get_name().isEmpty()); } + @Test + public void testCompileModelFromFileDeviceAuto() { + CompiledModel model = core.compile_model(modelXml); + assertTrue(model instanceof CompiledModel); + } + + @Test + public void testCompileModelFromFile() { + CompiledModel model = core.compile_model(modelXml, device); + assertTrue(model instanceof CompiledModel); + } + + @Test + public void testCompileModelWithProps() { + Map properties = new HashMap<>(); + properties.put("INFERENCE_NUM_THREADS", "1"); + CompiledModel model = core.compile_model(modelXml, device, properties); + assertTrue(model instanceof CompiledModel); + } + @Test public void testReadNetworkXmlOnly() { Model net = core.read_model(modelXml); @@ -72,4 +93,10 @@ public void testProperty() { config.put("CPU_THROUGHPUT_STREAMS", "1"); core.set_property("CPU", config); // Restore } + + @Test + public void testAvailableDevices() { + List availableDevices = core.get_available_devices(); + assertNotNull(availableDevices); + } } diff --git a/modules/java_api/src/test/java/org/intel/openvino/ModelTests.java b/modules/java_api/src/test/java/org/intel/openvino/ModelTests.java index df3d628c5..2debe3e32 100644 --- a/modules/java_api/src/test/java/org/intel/openvino/ModelTests.java +++ b/modules/java_api/src/test/java/org/intel/openvino/ModelTests.java @@ -31,6 +31,12 @@ public void testOutputName() { assertEquals("Output name", "fc_out", output.get_any_name()); } + @Test + public void testOutputType() { + 
Output output = net.output(); + assertEquals("Output element type", ElementType.f32, output.get_element_type()); + } + @Test public void testGetShape() { ArrayList outputs = net.outputs(); diff --git a/modules/java_api/src/test/java/org/intel/openvino/OpenvinoTests.java b/modules/java_api/src/test/java/org/intel/openvino/OpenvinoTests.java new file mode 100644 index 000000000..b51483e0e --- /dev/null +++ b/modules/java_api/src/test/java/org/intel/openvino/OpenvinoTests.java @@ -0,0 +1,35 @@ +package org.intel.openvino; + +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +public class OpenvinoTests extends OVTest { + + private Model model; + + @Before + public void init() { + Core core = new Core(); + model = core.read_model(modelXml); + } + + @Test + public void testSerialize() throws IOException { + File tmp = Files.createTempDirectory("ovtest").toFile(); + File xmlPath = Paths.get(tmp.getAbsolutePath(), "saved_model.xml").toFile(); + File binPath = Paths.get(tmp.getAbsolutePath(), "saved_model.bin").toFile(); + xmlPath.deleteOnExit(); + binPath.deleteOnExit(); + tmp.deleteOnExit(); + + Openvino.serialize(model, xmlPath.getAbsolutePath(), binPath.getAbsolutePath()); + assertTrue(xmlPath.exists() && binPath.exists()); + } +} diff --git a/modules/java_api/src/test/java/org/intel/openvino/TensorTests.java b/modules/java_api/src/test/java/org/intel/openvino/TensorTests.java index c9448c4f1..45a0f3cd8 100644 --- a/modules/java_api/src/test/java/org/intel/openvino/TensorTests.java +++ b/modules/java_api/src/test/java/org/intel/openvino/TensorTests.java @@ -4,6 +4,8 @@ import org.junit.Test; +import java.util.Arrays; + public class TensorTests extends OVTest { int[] dimsArr = {1, 3, 2, 2}; float[] data = {0.0f, 1.1f, 2.2f, 3.3f, 4.4f, 5.5f, 6.6f, 7.7f, 8.8f, 9.9f, 1.1f, 2.2f}; @@ -15,4 +17,29 @@ public void 
testGetTensorFromFloat() { assertArrayEquals(tensor.get_shape(), dimsArr); assertArrayEquals(tensor.data(), data, 0.0f); } + + @Test + public void testGetTensorFromInt() { + int size = Arrays.stream(dimsArr).reduce((i, j) -> i * j).orElse(1); + int[] inputData = new int[size]; + Arrays.fill(inputData, 1); + + Tensor tensor = new Tensor(dimsArr, inputData); + + assertArrayEquals(dimsArr, tensor.get_shape()); + assertArrayEquals(inputData, tensor.as_int()); + assertEquals(size, tensor.get_size()); + } + + @Test + public void testGetTensorFromLong() { + int size = Arrays.stream(dimsArr).reduce((i, j) -> i * j).orElse(1); + long[] inputData = new long[size]; + Arrays.fill(inputData, 1L); + + Tensor tensor = new Tensor(dimsArr, inputData); + + assertArrayEquals(dimsArr, tensor.get_shape()); + assertEquals(size, tensor.get_size()); + } }