From 249dfa9b622e68bf35f57d01bfb8ff111a874c20 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Mon, 25 Sep 2023 21:13:32 +0800 Subject: [PATCH 01/40] Add and init modules.csharp_api project, provide methods for using the CSharp API --- modules/csharp_api/README.md | 68 ++ modules/csharp_api/README_cn.md | 73 ++ modules/csharp_api/csharp/CSharpAPI.csproj | 80 +++ modules/csharp_api/csharp/base.cs | 70 ++ .../csharp/build/OpenVINO.CSharp.win.targets | 10 + modules/csharp_api/csharp/common/common.cs | 173 +++++ .../csharp_api/csharp/common/element_type.cs | 441 ++++++++++++ modules/csharp_api/csharp/common/version.cs | 75 ++ .../csharp_api/csharp/core/compiled_model.cs | 358 ++++++++++ modules/csharp_api/csharp/core/core.cs | 325 +++++++++ modules/csharp_api/csharp/core/dimension.cs | 84 +++ .../csharp_api/csharp/core/infer_request.cs | 383 ++++++++++ modules/csharp_api/csharp/core/layout.cs | 94 +++ modules/csharp_api/csharp/core/model.cs | 514 ++++++++++++++ modules/csharp_api/csharp/core/node.cs | 148 ++++ modules/csharp_api/csharp/core/node_input.cs | 69 ++ modules/csharp_api/csharp/core/node_output.cs | 71 ++ .../csharp_api/csharp/core/partial_shape.cs | 279 ++++++++ .../csharp_api/csharp/core/remote_context.cs | 37 + modules/csharp_api/csharp/core/shape.cs | 127 ++++ modules/csharp_api/csharp/core/tensor.cs | 313 +++++++++ .../csharp_api/csharp/exception/exception.cs | 46 ++ .../csharp/exception/handle_exception.cs | 236 +++++++ modules/csharp_api/csharp/model/Yolov8.cs | 665 ++++++++++++++++++ .../csharp/native_methods/ov_base.cs | 16 + .../csharp/native_methods/ov_common.cs | 30 + .../native_methods/ov_compiled_model.cs | 202 ++++++ .../csharp/native_methods/ov_core.cs | 332 +++++++++ .../csharp/native_methods/ov_dimension.cs | 27 + .../csharp/native_methods/ov_infer_request.cs | 278 ++++++++ .../csharp/native_methods/ov_layout.cs | 41 ++ .../csharp/native_methods/ov_model.cs | 304 ++++++++ .../csharp/native_methods/ov_node.cs | 89 +++ .../csharp/native_methods/ov_partial_shape.cs | 117 +++ .../native_methods/ov_prepostprocess.cs | 462 ++++++++++++ .../csharp/native_methods/ov_rank.cs | 24 + .../csharp/native_methods/ov_shape.cs | 37 + .../csharp/native_methods/ov_tensor.cs | 121 ++++ modules/csharp_api/csharp/ov/ov.cs | 33 + modules/csharp_api/csharp/ov/ov_struct.cs | 170 +++++ modules/csharp_api/csharp/preprocess/OvMat.cs | 63 ++ .../csharp_api/csharp/preprocess/common.cs | 73 ++ .../csharp/preprocess/input_info.cs | 106 +++ .../csharp/preprocess/input_model_info.cs | 77 ++ .../csharp/preprocess/input_tensor_info.cs | 189 +++++ .../csharp/preprocess/output_info.cs | 74 ++ .../csharp/preprocess/output_tensor_info.cs | 72 ++ .../csharp/preprocess/prepost_processor.cs | 178 +++++ .../csharp/preprocess/preprocess_steps.cs | 238 +++++++ modules/csharp_api/demos/yolov8/Program.cs | 200 ++++++ .../yolov8/Properties/launchSettings.json | 11 + modules/csharp_api/demos/yolov8/README.md | 328 +++++++++ modules/csharp_api/demos/yolov8/README_cn.md | 332 +++++++++ modules/csharp_api/demos/yolov8/yolov8.csproj | 15 + modules/csharp_api/docs/cn/linux_install.md | 134 ++++ modules/csharp_api/docs/cn/windows_install.md | 32 + modules/csharp_api/docs/en/linux_install.md | 132 ++++ modules/csharp_api/docs/en/windows_install.md | 32 + 58 files changed, 9308 insertions(+) create mode 100644 modules/csharp_api/README.md create mode 100644 modules/csharp_api/README_cn.md create mode 100644 modules/csharp_api/csharp/CSharpAPI.csproj create mode 100644 modules/csharp_api/csharp/base.cs create mode 100644 
modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets create mode 100644 modules/csharp_api/csharp/common/common.cs create mode 100644 modules/csharp_api/csharp/common/element_type.cs create mode 100644 modules/csharp_api/csharp/common/version.cs create mode 100644 modules/csharp_api/csharp/core/compiled_model.cs create mode 100644 modules/csharp_api/csharp/core/core.cs create mode 100644 modules/csharp_api/csharp/core/dimension.cs create mode 100644 modules/csharp_api/csharp/core/infer_request.cs create mode 100644 modules/csharp_api/csharp/core/layout.cs create mode 100644 modules/csharp_api/csharp/core/model.cs create mode 100644 modules/csharp_api/csharp/core/node.cs create mode 100644 modules/csharp_api/csharp/core/node_input.cs create mode 100644 modules/csharp_api/csharp/core/node_output.cs create mode 100644 modules/csharp_api/csharp/core/partial_shape.cs create mode 100644 modules/csharp_api/csharp/core/remote_context.cs create mode 100644 modules/csharp_api/csharp/core/shape.cs create mode 100644 modules/csharp_api/csharp/core/tensor.cs create mode 100644 modules/csharp_api/csharp/exception/exception.cs create mode 100644 modules/csharp_api/csharp/exception/handle_exception.cs create mode 100644 modules/csharp_api/csharp/model/Yolov8.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_base.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_common.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_compiled_model.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_core.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_dimension.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_infer_request.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_layout.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_model.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_node.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_partial_shape.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_rank.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_shape.cs create mode 100644 modules/csharp_api/csharp/native_methods/ov_tensor.cs create mode 100644 modules/csharp_api/csharp/ov/ov.cs create mode 100644 modules/csharp_api/csharp/ov/ov_struct.cs create mode 100644 modules/csharp_api/csharp/preprocess/OvMat.cs create mode 100644 modules/csharp_api/csharp/preprocess/common.cs create mode 100644 modules/csharp_api/csharp/preprocess/input_info.cs create mode 100644 modules/csharp_api/csharp/preprocess/input_model_info.cs create mode 100644 modules/csharp_api/csharp/preprocess/input_tensor_info.cs create mode 100644 modules/csharp_api/csharp/preprocess/output_info.cs create mode 100644 modules/csharp_api/csharp/preprocess/output_tensor_info.cs create mode 100644 modules/csharp_api/csharp/preprocess/prepost_processor.cs create mode 100644 modules/csharp_api/csharp/preprocess/preprocess_steps.cs create mode 100644 modules/csharp_api/demos/yolov8/Program.cs create mode 100644 modules/csharp_api/demos/yolov8/Properties/launchSettings.json create mode 100644 modules/csharp_api/demos/yolov8/README.md create mode 100644 modules/csharp_api/demos/yolov8/README_cn.md create mode 100644 modules/csharp_api/demos/yolov8/yolov8.csproj create mode 100644 modules/csharp_api/docs/cn/linux_install.md create mode 100644 modules/csharp_api/docs/cn/windows_install.md 
create mode 100644 modules/csharp_api/docs/en/linux_install.md
create mode 100644 modules/csharp_api/docs/en/windows_install.md

diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md
new file mode 100644
index 000000000..0a068cf7e
--- /dev/null
+++ b/modules/csharp_api/README.md
@@ -0,0 +1,68 @@
+# OpenVINO™ C# API
+
+
+[简体中文](README_cn.md) | English
+
+## 📚 What is OpenVINO™ C# API?
+
+[OpenVINO™](www.openvino.ai) is an open-source toolkit for optimizing and deploying AI inference.
+
+- Boost deep learning performance in computer vision, automatic speech recognition, natural language processing and other common tasks
+- Use models trained with popular frameworks like TensorFlow, PyTorch and more
+- Reduce resource demands and efficiently deploy on a range of Intel® platforms from edge to cloud
+
+  This project provides the OpenVINO™ C# API, built on top of the OpenVINO™ toolkit, and aims to promote the use of OpenVINO™ on the C# platform.
+
+  The OpenVINO™ C# API is developed on top of the OpenVINO™ C API, so the supported platforms are the same as for OpenVINO™ itself; refer to the OpenVINO™ documentation for details.
+
+## NuGet Package
+
+  C# supports installing packages through NuGet, which offers one-stop installation on platforms such as Linux and Windows. To make the library easier to adopt, a NuGet package for the Windows platform has been published.
+
+| Package | Description | Link |
+| ----------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
+| **OpenVINO.CSharp.win** | OpenVINO™ C# API core libraries, comes with a complete OpenVINO 2023.0 dependency library | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.win.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.win/) |
+
+## ⚙ How to install OpenVINO™ C# API?
+
+The following articles provide installation instructions for the OpenVINO™ C# API on different platforms; choose the one that matches your platform.
+
+- [Windows](docs/en/windows_install.md)
+
+- [Linux](docs/en/linux_install.md)
+
+## 🏷 How to use OpenVINO™ C# API?
+
+- **Quick start**
+  - [Deploying the Yolov8 full series model using OpenVINO™ C# API](demos/yolov8/README.md)
+
+- **Simple usage**
+
+If you are not sure how to use the API, the following code shows the basic workflow.
+
+```c#
+namespace test
+{
+    internal class Program
+    {
+        static void Main(string[] args)
+        {
+            Core core = new Core();
+            Model model = core.read_model("./model.xml");
+            CompiledModel compiled_model = core.compile_model(model, "AUTO");
+            InferRequest infer_request = compiled_model.create_infer_request();
+            Tensor input_tensor = infer_request.get_tensor("images");
+            infer_request.infer();
+            Tensor output_tensor = infer_request.get_tensor("output0");
+            core.dispose();
+        }
+    }
+}
+```
+
+The classes encapsulated in this project, such as Core, Model and Tensor, are implemented by calling the C API and therefore hold unmanaged resources. They need to be released by calling the **dispose()** method; otherwise memory leaks may occur.
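+
+The sketch below shows one possible cleanup pattern, releasing the wrappers explicitly once inference has finished. It reuses the names from the snippet above and is a minimal example rather than a complete application; which objects need disposing depends on which wrapper types your code creates.
+
+```c#
+namespace test
+{
+    internal class Program
+    {
+        static void Main(string[] args)
+        {
+            Core core = new Core();
+            Model model = core.read_model("./model.xml");
+            CompiledModel compiled_model = core.compile_model(model, "AUTO");
+            InferRequest infer_request = compiled_model.create_infer_request();
+            infer_request.infer();
+            // Release the wrappers in the reverse order of creation.
+            infer_request.dispose();
+            compiled_model.dispose();
+            model.dispose();
+            core.dispose();
+        }
+    }
+}
+```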
+ +## 🗂 API Reference + +If you want to learn more information, you can refer to: [OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/) diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md new file mode 100644 index 000000000..67a77952a --- /dev/null +++ b/modules/csharp_api/README_cn.md @@ -0,0 +1,73 @@ +# OpenVINO™ C# API + + + + +简体中文| [English](README.md) + +## 📚 简介 + +[OpenVINO™ ](www.openvino.ai)是一个用于优化和部署 AI 推理的开源工具包。 + +- 提升深度学习在计算机视觉、自动语音识别、自然语言处理和其他常见任务中的性能 +- 使用流行框架(如TensorFlow,PyTorch等)训练的模型 +- 减少资源需求,并在从边缘到云的一系列英特尔®平台上高效部署 + +  该项目主要是基于OpenVINO™工具套件推出的 OpenVINO™ C# API,旨在推动 OpenVINO™ 在C#平台的应用。 + +  OpenVINO™ C# API 由于是基于 OpenVINO™ C API 开发,所支持的平台与OpenVINO™ 一致,具体信息可以参考 OpenVINO™。 + +## NuGet Package + +C# 支持 NuGet Package 方式安装程序包,在Linux、Window 等平台支持一站式安装使用,因此为了方便更多用户使用,目前发行了 Window 平台下使用的 NuGet Package ,方便大家使用。 + +| Package | Description | Link | +| ----------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.CSharp.win** | OpenVINO™ C# API core libraries,附带完整的OpenVINO 2023.1依赖库 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.win.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.win/) | + +## ⚙ 如何安装 + +以下文章提供了OpenVINO™ C# API在不同平台的安装方法,可以根据自己使用平台进行安装。 + +- [Windows](docs/cn/windows_install.md) + +- [Linux](docs/cn/linux_install.md) + +## 🏷开始使用 + +- **快速体验** + + [使用OpenVINO™ C# API部署Yolov8全系列模型](demos/yolov8/README_cn.md) + +- **使用方法** + +如果你不知道如何使用,通过下面代码简单了解使用方法。 + +```c# +namespace test +{ + internal class Program + { + static void Main(string[] args) + { + Core core = new Core(); // 初始化 Core 核心 + Model model = core.read_model("./model.xml"); // 读取模型文件 + CompiledModel compiled_model = core.compiled_model(model, "AUTO"); // 将模型加载到设备 + InferRequest infer_request = compiled_model.create_infer_request(); // 创建推理通道 + Tensor input_tensor = infer_request.get_tensor("images"); // 获取输入节点Tensor + infer_request.infer(); // 模型推理 + Tensor output_tensor = infer_request.get_tensor("output0"); // 获取输出节点Tensor + core.free(); // 清理 Core 非托管内存 + } + } +} +``` + +项目中所封装的类、对象例如Core、Model、Tensor等,通过调用 C api 接口实现,具有非托管资源,需要调用**dispose()**方法处理,否则就会出现内存泄漏。 + +## 🗂 API 文档 + +如果想了解更多信息,可以参阅:[OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/) + + + diff --git a/modules/csharp_api/csharp/CSharpAPI.csproj b/modules/csharp_api/csharp/CSharpAPI.csproj new file mode 100644 index 000000000..ab608ca1e --- /dev/null +++ b/modules/csharp_api/csharp/CSharpAPI.csproj @@ -0,0 +1,80 @@ + + + + + net6.0;net48 + True + True + OpenVINO.CSharp.win + + OpenVINO C# API + 3.0.122-test-8 + Guojin Yan + Guojin Yan + OpenVINO C# API + 基于C#平台调用OpenVINO套件部署深度学习模型。 +Based on the C # platform, call the OpenVINO suite to deploy a deep learning model. +目前版本为测试版本,会存在相关的问题,待后续更新会修改相应的错误;如有其他问题请联系作者解决。 + https://github.com/guojin-yan/OpenVINO-CSharp-API + https://github.com/guojin-yan/OpenVINO-CSharp-API + git + ../../nuget + zh + NuGet.png + README.md + 该版本为OpenVINO™ C# API 3.0 预发行版本,功能还未完善,如使用中有问题,欢迎与我沟通联系。 +This version is a pre release version of OpenVINO™ C# API 3.0 and its features are not yet fully developed. If there are any issues during use, please feel free to contact me. 
+ OpenVinoSharp + OpenVINOCSharp + + + + + + + true + build\openvino2023.0\%(Filename)%(Extension) + + + True + \ + + + True + \ + + + + + + + + + + + + + + + + + + + + + + true + build\ + + + + + + + + + $(DefineConstants);DOTNET_FRAMEWORK; + + + + diff --git a/modules/csharp_api/csharp/base.cs b/modules/csharp_api/csharp/base.cs new file mode 100644 index 000000000..b7f35c2ce --- /dev/null +++ b/modules/csharp_api/csharp/base.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + + +namespace OpenVinoSharp +{ + + /// + /// OpenVINO wrapper for .NET. + /// This is the basic namespace of OpenVINO in Cshrp, + /// and all classes and methods are within this method. + /// OpenVinoSharp. + /// + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + class NamespaceDoc + { + } + + namespace element { + /// + /// OpenVINO wrapper for .NET. + /// Define elements in OpenVINO. + /// OpenVinoSharp.element. + /// + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + class NamespaceDoc + { + } + } + + + namespace preprocess { + /// + /// Mainly defined the data processing methods in OpenVINO. + /// OpenVinoSharp.preprocess. + /// + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + class NamespaceDoc + { + } + } + + + namespace model + { + /// + /// Processing methods for main common models. + /// OpenVinoSharp.model. + /// + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + class NamespaceDoc + { + } + namespace Yolov8 { + /// + /// The processing methods of the main Yolov8 model. + /// OpenVinoSharp.model.Yolov8. + /// + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + class NamespaceDoc + { + } + } + } +} + + diff --git a/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets b/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets new file mode 100644 index 000000000..33800aebc --- /dev/null +++ b/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets @@ -0,0 +1,10 @@ + + + + + %(RecursiverDir)openvino2023.0/%(Filename)%(Extension) + PreserveNewest + + + + \ No newline at end of file diff --git a/modules/csharp_api/csharp/common/common.cs b/modules/csharp_api/csharp/common/common.cs new file mode 100644 index 000000000..86b426de3 --- /dev/null +++ b/modules/csharp_api/csharp/common/common.cs @@ -0,0 +1,173 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// This enum contains codes for all possible return values of the interface functions + /// + public enum ExceptionStatus : int + { + /// + /// SUCCESS! 
+ /// + OK = 0, + // map exception to C++ interface + /// + /// GENERAL_ERROR + /// + GENERAL_ERROR = -1, + /// + /// NOT_IMPLEMENTED + /// + NOT_IMPLEMENTED = -2, + /// + /// NETWORK_NOT_LOADED + /// + NETWORK_NOT_LOADED = -3, + /// + /// PARAMETER_MISMATCH + /// + PARAMETER_MISMATCH = -4, + /// + /// NOT_FOUND + /// + NOT_FOUND = -5, + /// + /// OUT_OF_BOUNDS + /// + OUT_OF_BOUNDS = -6, + + // exception not of std::exception derived type was thrown + /// + /// UNEXPECTED + /// + UNEXPECTED = -7, + /// + /// REQUEST_BUSY + /// + REQUEST_BUSY = -8, + /// + /// RESULT_NOT_READY + /// + RESULT_NOT_READY = -9, + /// + /// NOT_ALLOCATED + /// + NOT_ALLOCATED = -10, + /// + /// INFER_NOT_STARTED + /// + INFER_NOT_STARTED = -11, + /// + /// NETWORK_NOT_READ + /// + NETWORK_NOT_READ = -12, + /// + /// INFER_CANCELLED + /// + INFER_CANCELLED = -13, + + // exception in C wrapper + + /// + /// INVALID_C_PARAM + /// + INVALID_C_PARAM = -14, + /// + /// UNKNOWN_C_ERROR + /// + UNKNOWN_C_ERROR = -15, + /// + /// NOT_IMPLEMENT_C_METHOD + /// + NOT_IMPLEMENT_C_METHOD = -16, + /// + /// UNKNOW_EXCEPTION + /// + UNKNOW_EXCEPTION = -17, + } + + /// + /// This enum contains codes for element type. + /// + public enum ElementType : uint + { + /// + /// Undefined element type + /// + UNDEFINED = 0U, + /// + /// Dynamic element type + /// + DYNAMIC, + /// + /// boolean element type + /// + BOOLEAN, + /// + /// bf16 element type + /// + BF16, + /// + /// f16 element type + /// + F16, + /// + /// f32 element type + /// + F32, + /// + /// f64 element type + /// + F64, + /// + /// i4 element type + /// + I4, + /// + /// i8 element type + /// + I8, + /// + /// i16 element type + /// + I16, + /// + /// i32 element type + /// + I32, + /// + /// i64 element type + /// + I64, + /// + /// binary element type + /// + U1, + /// + /// u4 element type + /// + U4, + /// + /// u8 element type + /// + U8, + /// + /// u16 element type + /// + U16, + /// + /// u32 element type + /// + U32, + /// + /// u64 element type + /// + U64, + }; + +} diff --git a/modules/csharp_api/csharp/common/element_type.cs b/modules/csharp_api/csharp/common/element_type.cs new file mode 100644 index 000000000..dc20b464c --- /dev/null +++ b/modules/csharp_api/csharp/common/element_type.cs @@ -0,0 +1,441 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using System.Xml.Linq; + +namespace OpenVinoSharp +{ + /// + /// The class of data type, mainly used for model data types. + /// + /// + /// OvType inherits from element. Type + /// + public class OvType : element.Type + { + /// + /// OvType constructor, initializing element. Type + /// + /// ElementType data + public OvType(ElementType t) : base(t) { } + /// + /// OvType copy constructor, initializing element. 
Type + /// + /// OvType data + public OvType(OvType t) : base(t.m_type) { } + /// + /// OvType constructor, initializing element.Type through data type string + /// + /// data type string + public OvType(string type) : base (type) { } + + }; + namespace element + { + + /// + /// Enum to define possible element types + /// ov_element_c#_api + /// + public enum Type_t + { + /// + /// Undefined element type + /// + undefined, + /// + /// Dynamic element type + /// + dynamic, + /// + /// boolean element type + /// + boolean, + /// + /// bf16 element type + /// + bf16, + /// + /// f16 element type + /// + f16, + /// + /// f32 element type + /// + f32, + /// + /// f64 element type + /// + f64, + /// + /// i4 element type + /// + i4, + /// + /// i8 element type + /// + i8, + /// + /// i16 element type + /// + i16, + /// + /// i32 element type + /// + i32, + /// + /// i64 element type + /// + i64, + /// + /// binary element type + /// + u1, + /// + /// u4 element type + /// + u4, + /// + /// u8 element type + /// + u8, + /// + /// u16 element type + /// + u16, + /// + /// u32 element type + /// + u32, + /// + /// u64 element type + /// + u64 + }; + + /// + /// [struct] Type information storage struct. + /// + struct TypeInfo + { + /// + /// data length. + /// + public ulong m_bitwidth; + /// + /// real number flag + /// + public bool m_is_real; + /// + /// signed number flag + /// + public bool m_is_signed; + /// + /// quantize number flag + /// + public bool m_is_quantized; + /// + /// type name full name string + /// + public string m_cname; + /// + /// type name abbreviation string + /// + public string m_type_name; + /// + /// Structure constructor + /// + /// data length. + /// real number flag + /// signed number flag + /// quantize number flag + /// type name full name string + /// type name abbreviation string + public TypeInfo(ulong bitwidth, bool is_real, bool is_signed, bool is_quantized, string cname, string type_name) + { + m_bitwidth = bitwidth; + m_is_real = is_real; + m_is_signed = is_signed; + m_is_quantized = is_quantized; + m_cname = cname; + m_type_name = type_name; + } + } + /// + /// Base class to define element type + /// ov_element_c#_api + /// + public class Type { + /// + /// data type, defined based on Type_t. + /// + protected Type_t m_type = Type_t.undefined ; + /// + /// OvType constructor, by Type_t initialize the Type class + /// + /// Type_t data + public Type(Type_t t) { m_type = t; } + /// + /// OvType constructor, by ElementType initialize the Type class + /// + /// ElementType data + public Type(ElementType t) { m_type = (Type_t)t; } + /// + /// OvType copy constructor, by Type initialize the Type class + /// + /// Type data + public Type(Type t) { + m_type = t.m_type; + } + /// + /// OvType constructor, initializing element.Type through data type string + /// + /// data type string + public Type(string type) { + new Type(type_from_string(type)); + } + /// + /// Get data type. + /// + /// ElementType type + public ElementType get_type() { + return (ElementType)m_type; + } + /// + /// Get type full name string. + /// + /// full name string + public string c_type_string() + { + return get_type_info(m_type).m_cname; + } + /// + /// Get data type length. + /// + /// type length + public ulong size() + { + return (bitwidth() + 7) >> 3; + } + /// + /// Get type number. + /// + /// type number + public ulong hash() + { + return (ulong)(m_type); + } + /// + /// Get abbreviated name. 
+ /// + /// abbreviated name + public string get_type_name() + { + return to_string(); + } + /// + /// Determine whether it is a real number + /// + /// true: is real; false: not real + public bool is_integral() + { + return !is_real(); + } + /// + /// Convert data type to string + /// + /// data type string + public string to_string() + { + return get_type_info(m_type).m_type_name; + } + /// + /// Determine whether the current data type is static. + /// + /// true : is static; false : not static + public bool is_static() + { + return get_type_info(m_type).m_bitwidth != 0; + } + /// + /// Determine whether the current data type is real. + /// + /// true : is real; false : not real + public bool is_real() + { + return get_type_info(m_type).m_is_real; + } + /// + /// Determine whether the current data type is integral number. + /// + /// true : is integral number; false : not integral number + public bool is_integral_number() + { + return is_integral() && (m_type != Type_t.boolean); + } + /// + /// Determine whether the current data type is signed. + /// + /// true : is signed; false : not signed + public bool is_signed() + { + return get_type_info(m_type).m_is_signed; + } + /// + /// Determine whether the current data is of quantum type + /// + /// true : is quantized; false : not quantized + public bool is_quantized() + { + return get_type_info(m_type).m_is_quantized; + } + /// + /// Obtain the size of the current data type + /// + /// the size of the current data type + public ulong bitwidth() + { + return get_type_info(m_type).m_bitwidth; + } + + /// + /// Get the current type of the Type_ Info + /// + /// Type_t + /// TypeInfo data + TypeInfo get_type_info(element.Type_t type) + { + switch (type) + { + case element.Type_t.undefined: + return new TypeInfo(10000, false, false, false, "undefined", "undefined"); + case element.Type_t.dynamic: + return new TypeInfo(0, false, false, false, "dynamic", "dynamic"); + case element.Type_t.boolean: + return new TypeInfo(8, false, true, false, "char", "boolean"); + case element.Type_t.bf16: + return new TypeInfo(16, true, true, false, "bfloat16", "bf16"); + case element.Type_t.f16: + return new TypeInfo(16, true, true, false, "float16", "f16"); + case element.Type_t.f32: + return new TypeInfo(32, true, true, false, "float", "f32"); + case element.Type_t.f64: + return new TypeInfo(64, true, true, false, "double", "f64"); + case element.Type_t.i4: + return new TypeInfo(4, false, true, true, "int4_t", "i4"); + case element.Type_t.i8: + return new TypeInfo(8, false, true, true, "int8_t", "i8"); + case element.Type_t.i16: + return new TypeInfo(16, false, true, false, "int16_t", "i16"); + case element.Type_t.i32: + return new TypeInfo(32, false, true, true, "int32_t", "i32"); + case element.Type_t.i64: + return new TypeInfo(64, false, true, false, "int64_t", "i64"); + case element.Type_t.u1: + return new TypeInfo(1, false, false, false, "uint1_t", "u1"); + case element.Type_t.u4: + return new TypeInfo(4, false, false, false, "uint4_t", "u4"); + case element.Type_t.u8: + return new TypeInfo(8, false, false, true, "uint8_t", "u8"); + case element.Type_t.u16: + return new TypeInfo(16, false, false, false, "uint16_t", "u16"); + case element.Type_t.u32: + return new TypeInfo(32, false, false, false, "uint32_t", "u32"); + case element.Type_t.u64: + return new TypeInfo(64, false, false, false, "uint64_t", "u64"); + default: + return new TypeInfo(100000, false, false, false, "default", "default"); + } + } + /// + /// Convert type string to Type class + /// + /// 
type string + /// Type class + Type type_from_string(string type) + { + if (type == "f16" || type == "FP16") + { + return new Type(Type_t.f16); + } + else if (type == "f32" || type == "FP32") + { + return new Type(Type_t.f32); + } + else if (type == "bf16" || type == "BF16") + { + return new Type(Type_t.bf16); + } + else if (type == "f64" || type == "FP64") + { + return new Type(Type_t.f64); + } + else if (type == "i4" || type == "I4") + { + return new Type(Type_t.i4); + } + else if (type == "i8" || type == "I8") + { + return new Type(Type_t.i8); + } + else if (type == "i16" || type == "I16") + { + return new Type(Type_t.i16); + } + else if (type == "i32" || type == "I32") + { + return new Type(Type_t.i32); + } + else if (type == "i64" || type == "I64") + { + return new Type(Type_t.i64); + } + else if (type == "u1" || type == "U1" || type == "BIN" || type == "bin") + { + return new Type(Type_t.u1); + } + else if (type == "u4" || type == "U4") + { + return new Type(Type_t.u4); + } + else if (type == "u8" || type == "U8") + { + return new Type(Type_t.u8); + } + else if (type == "u16" || type == "U16") + { + return new Type(Type_t.u16); + } + else if (type == "u32" || type == "U32") + { + return new Type(Type_t.u32); + } + else if (type == "u64" || type == "U64") + { + return new Type(Type_t.u64); + } + else if (type == "boolean" || type == "BOOL") + { + return new Type(Type_t.boolean); + } + else if (type == "undefined" || type == "UNSPECIFIED") + { + return new Type(Type_t.undefined); + } + else if (type == "dynamic") + { + return new Type(Type_t.dynamic); + } + else + { + return new Type(Type_t.undefined); + } + } + }; + + + } +} diff --git a/modules/csharp_api/csharp/common/version.cs b/modules/csharp_api/csharp/common/version.cs new file mode 100644 index 000000000..0fafe4f0d --- /dev/null +++ b/modules/csharp_api/csharp/common/version.cs @@ -0,0 +1,75 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// [struct] Represents version information that describes plugins and the OpemVINO library + /// + /// ov_runtime_c#_api + public struct Version + { + /// + /// A null terminated string with build number + /// + public string buildNumber; + /// + /// A null terminated description string + /// + public string description; + + public Version(string buildNumber, string description) { + this.buildNumber = buildNumber; + this.description = description; + } + + /// + /// Convert Version to output string + /// + /// Output string + public string to_string() + { + string str = ""; + str += description; + str += "\r\n Version : "; + str += buildNumber.Substring(0, buildNumber.IndexOf("-")); + str += "\r\n Build : "; + str += buildNumber; + return str; + } + } + /// + /// [struct] Represents version information that describes device and ov runtime library + /// + public struct CoreVersion + { + /// + /// A device name + /// + public string device_name; + public Version version; + } + /// + /// [struct] Represents version information that describes all devices and ov runtime library + /// + public struct CoreVersionList + { + /// + /// An array of device versions + /// + public IntPtr core_version; + /// + /// A number of versions in the array + /// + public ulong size; + } + + + + + +} diff --git a/modules/csharp_api/csharp/core/compiled_model.cs b/modules/csharp_api/csharp/core/compiled_model.cs new file mode 100644 index 000000000..917457782 --- 
/dev/null +++ b/modules/csharp_api/csharp/core/compiled_model.cs @@ -0,0 +1,358 @@ +using OpenCvSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// This class represents a compiled model. + /// + /// ov_runtime_c#_api + /// + /// A model is compiled by a specific device by applying multiple optimization + /// transformations, then mapping to compute kernels. + /// + public class CompiledModel + { + /// + /// [private]CompiledModel class pointer. + /// + private IntPtr m_ptr; + /// + /// [private]CompiledModel class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Constructs CompiledModel from the initialized ptr. + /// + /// + public CompiledModel(IntPtr ptr) + { + this.m_ptr = ptr; + } + /// + /// CompiledModel()'s destructor + /// + ~CompiledModel() + { + dispose(); + } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_core_free(m_ptr); + + m_ptr = IntPtr.Zero; + } + /// + /// Creates an inference request object used to infer the compiled model. + /// The created request has allocated input and output tensors (which can be changed later). + /// + /// InferRequest object + public InferRequest create_infer_request() + { + IntPtr infer_request_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_create_infer_request(m_ptr, ref infer_request_ptr)); + return new InferRequest(infer_request_ptr); + } + + /// + /// Get a const single input port of compiled_model, which only support single input compiled_model. + /// + /// The input port of compiled_model. + public Node get_input() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_input(m_ptr, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + + /// + /// Get a const input port of compiled_model by name. + /// + /// input tensor name (string). + /// The input port of compiled_model. + public Node get_input(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_compiled_model_input_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + + /// + /// Get a const input port of compiled_model by port index. + /// + /// input tensor index. + /// The input port of compiled_model. + public Node get_input(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_input_by_index(m_ptr, index, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + + /// + /// Get a const single output port of compiled_model, which only support single output model. + /// + /// The output port of compiled_model. + public Node get_output() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler(NativeMethods.ov_compiled_model_output(m_ptr, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a const output port of compiled_model by name. + /// + /// output tensor name (string). + /// The output port of compiled_model. 
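+        /// <example>
+        /// A minimal usage sketch; the tensor name "output0" is only an illustrative assumption:
+        /// <code>
+        /// Node output_port = compiled_model.get_output("output0");
+        /// </code>
+        /// </example>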
+ public Node get_output(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_compiled_model_output_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a const output port of compiled_model by port index. + /// + /// input tensor index. + /// The output port of compiled_model. + public Node get_output(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_output_by_index(m_ptr, index, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get the input size of compiled_model. + /// + /// The input size of compiled_model. + public ulong get_inputs_size() + { + ulong input_size = 0; + HandleException.handler( + NativeMethods.ov_compiled_model_inputs_size(m_ptr, ref input_size)); + return input_size; + } + /// + /// Get the output size of compiled_model. + /// + /// The output size. + public ulong get_outputs_size() + { + ulong output_size = 0; + HandleException.handler( + NativeMethods.ov_compiled_model_outputs_size(m_ptr, ref output_size)); + return output_size; + } + + /// + /// Gets a single input of a compiled model. + /// + /// + /// The input is represented as an output of the ov::op::v0::Parameter operation. + /// The input contains information about input tensor such as tensor shape, names, and element type. + /// + /// Compiled model input. + /// If a model has more than one input, this method throws ov::Exception. + public Input input() + { + Node node = get_input(); + return new Input(node, 0); + } + /// + /// Gets input of a compiled model identified by @p index. + /// + /// The input contains information about input tensor such as tensor shape, names, and element type. + /// Index of input. + /// Compiled model input. + /// The method throws ov::Exception if input with the specified index @p i is not found. + public Input input(ulong index) + { + Node node = get_input(index); + return new Input(node, index); + } + /// + /// Gets input of a compiled model identified by @p tensor_name. + /// + /// The input contains information about input tensor such as tensor shape, names, and element type. + /// Output tensor name. + /// Compiled model input. + /// The method throws ov::Exception if input with the specified tensor name @p tensor_name is not found. + public Input input(string tensor_name) + { + Node node = get_input(tensor_name); + return new Input(node, 0); + } + + /// + /// Gets a single output of a compiled model. + /// + /// + /// The output is represented as an output from the ov::op::v0::Result operation. + /// The output contains information about output tensor such as tensor shape, names, and element type. + /// + /// Compiled model output. + /// If a model has more than one output, this method throws ov::Exception. + public Output output() + { + Node node = get_output(); + return new Output(node, 0); + } + /// + /// Gets output of a compiled model identified by @p index. + /// + /// The output contains information about output tensor such as tensor shape, names, and element type. + /// Index of output. + /// Compiled model output. + /// The method throws ov::Exception if output with the specified index @p index is not found. 
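+        /// <example>
+        /// A minimal sketch that reads one output by index, assuming an existing compiled_model with at least one output:
+        /// <code>
+        /// ulong output_count = compiled_model.get_outputs_size();
+        /// Output first_output = compiled_model.output(0);
+        /// </code>
+        /// </example>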
+ public Output output(ulong index) + { + Node node = get_output(index); + return new Output(node, index); + } + /// + /// Gets output of a compiled model identified by @p tensor_name. + /// + /// The output contains information about output tensor such as tensor shape, names, and element type. + /// Output tensor name. + /// Compiled model output. + /// The method throws ov::Exception if output with the specified tensor name @p tensor_name is not found. + public Output output(string tensor_name) + { + Node node = get_output(tensor_name); + return new Output(node, 0); + } + + /// + /// Gets all inputs of a compiled model. + /// + /// + /// Inputs are represented as a vector of outputs of the ov::op::v0::Parameter operations. + /// They contain information about input tensors such as tensor shape, names, and element type. + /// + /// List of model inputs. + public List inputs() + { + ulong input_size = get_inputs_size(); + List inputs = new List(); + for (ulong index = 0; index < input_size; ++index) + { + inputs.Add(input(index)); + } + return inputs; + } + + /// + /// Get all outputs of a compiled model. + /// + /// + /// Outputs are represented as a vector of output from the ov::op::v0::Result operations. + /// Outputs contain information about output tensors such as tensor shape, names, and element type. + /// + /// List of model outputs. + public List outputs() + { + ulong output_size = get_outputs_size(); + List outputs = new List(); + for (ulong index = 0; index < output_size; ++index) + { + outputs.Add(output(index)); + } + return outputs; + } + /// + /// Gets runtime model information from a device. + /// + /// + /// This object represents an internal device-specific model that is optimized for a particular + /// accelerator. It contains device-specific nodes, runtime information and can be used only + /// to understand how the source model is optimized and which kernels, element types, and layouts + /// are selected for optimal inference. + /// + /// + public Model get_runtime_model() + { + IntPtr model_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_get_runtime_model(m_ptr, ref model_ptr)); + return new Model(model_ptr); + } + + /// + /// Exports the current compiled model to an output model_path. + /// The exported model can also be imported via the ov::Core::import_model method. + /// + /// Output path to store the model to. + public void export_model(string model_path) + { + sbyte[] c_model_path = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); + HandleException.handler( + NativeMethods.ov_compiled_model_export_model(m_ptr, ref c_model_path[0])); + } + + /// + /// Sets properties for the current compiled model. + /// + /// Map of pairs: (property name, property value). + public void set_property(KeyValuePair properties) + { + IntPtr property_key = Marshal.StringToHGlobalAnsi(properties.Key); + IntPtr property_value = Marshal.StringToHGlobalAnsi(properties.Value); + HandleException.handler( + NativeMethods.ov_compiled_model_set_property(m_ptr, property_key, property_value)); + } + /// + /// Gets properties for current compiled model + /// + /// + /// The method is responsible for extracting information that affects compiled model inference. 
+ /// The list of supported configuration values can be extracted via CompiledModel::get_property + /// with the ov::supported_properties key, but some of these keys cannot be changed dynamically, + /// for example, ov::device::id cannot be changed if a compiled model has already been compiled + /// for a particular device. + /// + /// Property key, can be found in openvino/runtime/properties.hpp. + /// Property value. + public string get_property(string property_key) + { + sbyte[] c_property_key = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(property_key)); + IntPtr property_value_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_get_property(m_ptr, ref c_property_key[0], + ref property_value_ptr)); + return Marshal.PtrToStringAnsi(property_value_ptr); + } + + /// + /// Returns pointer to device-specific shared context on a remote accelerator device that was used + /// to create this CompiledModel. + /// + /// A context. + public RemoteContext get_context() { + IntPtr context_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_compiled_model_get_context(m_ptr, ref context_ptr)); + return new RemoteContext(context_ptr); + } + } +} diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs new file mode 100644 index 000000000..1ac5501cb --- /dev/null +++ b/modules/csharp_api/csharp/core/core.cs @@ -0,0 +1,325 @@ +using System; +using System.Collections.Generic; +using System.Runtime.InteropServices; + + +namespace OpenVinoSharp +{ + + /// + /// This class represents an OpenVINO runtime Core entity. + /// ov_runtime_c#_api + /// + /// User applications can create several Core class instances, but in this case the underlying plugins + /// are created multiple times and not shared between several Core instances.The recommended way is to have + /// a single Core instance per application. + /// + public class Core + { + /// + /// [private]Core class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Core class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Represent all available devices. + /// + struct ov_available_devices_t + { + /// + /// devices' name + /// + public IntPtr devices; + /// + /// devices' number + /// + public ulong size; + } + + /// + /// Constructs an OpenVINO Core instance with devices and their plugins description. + /// There are two ways how to configure device plugins: + /// 1. (default) Use XML configuration file in case of dynamic libraries build; + /// 2. Use strictly defined configuration in case of static libraries build. + /// + /// + /// Path to the .xml file with plugins to load from. If the XML configuration file is not + /// specified, default OpenVINO Runtime plugins are loaded from: + /// 1. (dynamic build) default `plugins.xml` file located in the same folder as OpenVINO runtime shared library; + /// 2. (static build) statically defined configuration.In this case path to the.xml file is ignored. 
+ /// + public Core(string xml_config_file = null) + { + if (!String.IsNullOrEmpty(xml_config_file)) + { + HandleException.handler( + NativeMethods.ov_core_create_with_config(xml_config_file, ref m_ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_core_create(ref m_ptr)); + } + + + } + /// + /// Core's destructor + /// + ~Core() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_core_free(m_ptr); + + m_ptr = IntPtr.Zero; + } + /// + /// Returns device plugins version information. + /// + /// Device name to identify a plugin. + /// A vector of versions. + /// + /// Device name can be complex and identify multiple devices at once like `HETERO:CPU,GPU`; + /// in this case, std::map contains multiple entries, each per device. + /// + public KeyValuePair get_versions(string device_name) + { + if (string.IsNullOrEmpty(device_name)) + { + throw new ArgumentNullException(nameof(device_name)); + } + ExceptionStatus status; + int l = Marshal.SizeOf(typeof(CoreVersionList)); + IntPtr ptr_core_version_s = Marshal.AllocHGlobal(l); + sbyte[] c_device_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_get_versions_by_device_name(m_ptr, ref c_device_name[0], ptr_core_version_s)); + var temp1 = Marshal.PtrToStructure(ptr_core_version_s, typeof(CoreVersionList)); + CoreVersionList core_version_s = (CoreVersionList)temp1; + var temp2 = Marshal.PtrToStructure(core_version_s.core_version, typeof(CoreVersion)); + CoreVersion core_version = (CoreVersion)temp2; + KeyValuePair value = new KeyValuePair(core_version.device_name, core_version.version); + NativeMethods.ov_core_versions_free(ptr_core_version_s); + + return value; + } + + + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite file formats. + /// + /// Path to a model. + /// Path to a data file. + /// A model. + /// + /// + /// For IR format (*.bin): + /// if `bin_path` is empty, will try to read a bin file with the same name as xml and + /// if the bin file with the same name is not found, will load IR without weights. + /// For the following file formats the `bin_path` parameter is not used: + /// + /// ONNX format (*.onnx) + /// PDPD(*.pdmodel) + /// TF(*.pb) + /// TFLite(*.tflite) + /// + public Model read_model(string model_path, string bin_path = "") + { + if (string.IsNullOrEmpty(model_path)) + { + throw new ArgumentNullException(nameof(model_path)); + } + IntPtr model_ptr = new IntPtr(); + sbyte[] c_model_path = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); + + if (bin_path == "") + { + sbyte c_bin_path = new sbyte(); + HandleException.handler( + NativeMethods.ov_core_read_model(m_ptr, ref c_model_path[0], ref c_bin_path, ref model_ptr)); + } + else + { + sbyte[] c_bin_path = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(bin_path)); + HandleException.handler( + NativeMethods.ov_core_read_model(m_ptr, ref c_model_path[0], ref c_bin_path[0], ref model_ptr)); + } + + return new Model(model_ptr); + } + + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. + /// + /// String with a model in IR / ONNX / PDPD / TF / TFLite format. + /// Shared pointer to a constant tensor with weights. + /// + /// Created model object shares the weights with the @p weights object. + /// Thus, do not create @p weights on temporary data that can be freed later, since the model constant data will point to an invalid memory. 
+ /// + /// A model. + public Model read_model(string model_path, Tensor weights) + { + if (string.IsNullOrEmpty(model_path)) + { + throw new ArgumentNullException(nameof(model_path)); + } + if (weights == null) + { + throw new ArgumentNullException(nameof(weights)); + } + IntPtr model_ptr = new IntPtr(); + sbyte[] c_model_path = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); + HandleException.handler( + NativeMethods.ov_core_read_model_from_memory(m_ptr, ref c_model_path[0], weights.Ptr, ref model_ptr)); + return new Model(model_ptr); + } + + /// + /// Creates a compiled model from a source model object. + /// + /// Model object acquired from Core::read_model. + /// A compiled model. + /// + /// Users can create as many compiled models as they need and use + /// them simultaneously (up to the limitation of the hardware resources). + /// + public CompiledModel compile_model(Model model) + { + if (model == null) + { + throw new ArgumentNullException(nameof(model)); + } + IntPtr compiled_model_ptr = new IntPtr(); + string device_name = "AUTO"; + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 0, ref compiled_model_ptr)); + return new CompiledModel(compiled_model_ptr); + } + + /// + /// Creates and loads a compiled model from a source model to the default OpenVINO device selected by the AUTO + /// + /// Model object acquired from Core::read_model. + /// Name of a device to load a model to. + /// A compiled model. + /// + /// Users can create as many compiled models as they need and use + /// them simultaneously (up to the limitation of the hardware resources). + /// + public CompiledModel compile_model(Model model, string device_name) + { + if (model == null) + { + throw new ArgumentNullException(nameof(model)); + } + if (string.IsNullOrEmpty(device_name)) + { + throw new ArgumentNullException(nameof(device_name)); + } + IntPtr compiled_model_ptr = new IntPtr(); + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 0, ref compiled_model_ptr)); + return new CompiledModel(compiled_model_ptr); + } + + /// + /// Reads and loads a compiled model from the IR/ONNX/PDPD file to the default OpenVINO device selected by the AUTO plugin. + /// + /// Path to a model. + /// + /// This can be more efficient than using the Core::read_model + Core::compile_model(model_in_memory_object) flow, + /// especially for cases when caching is enabled and a cached model is availab + /// + /// A compiled model. + public CompiledModel compile_model(string model_path) + { + if (string.IsNullOrEmpty(model_path)) + { + throw new ArgumentNullException(nameof(model_path)); + } + IntPtr compiled_model_ptr = new IntPtr(); + string device_name = "AUTO"; + sbyte[] c_model = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); + return new CompiledModel(compiled_model_ptr); + } + + + /// + /// Reads a model and creates a compiled model from the IR/ONNX/PDPD file. + /// + /// Path to a model. + /// Name of a device to load a model to. 
+ /// + /// This can be more efficient than using the Core::read_model + Core::compile_model(model_in_memory_object) flow, + /// especially for cases when caching is enabled and a cached model is availab + /// + /// A compiled model. + public CompiledModel compile_model(string model_path, string device_name) + { + if (string.IsNullOrEmpty(model_path)) + { + throw new ArgumentNullException(nameof(model_path)); + } + if (string.IsNullOrEmpty(device_name)) + { + throw new ArgumentNullException(nameof(device_name)); + } + IntPtr compiled_model_ptr = new IntPtr(); + sbyte[] c_model = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); + return new CompiledModel(compiled_model_ptr); + } + + + /// + /// Returns devices available for inference. + /// Core objects go over all registered plugins and ask about available devices. + /// + /// A vector of devices. The devices are returned as { CPU, GPU.0, GPU.1, GNA }. + /// + /// If there is more than one device of a specific type, they are enumerated with the .# suffix. + /// Such enumerated device can later be used as a device name in all Core methods like Core::compile_model, + /// Core::query_model, Core::set_property and so on. + /// + public List get_available_devices() + { + int l = Marshal.SizeOf(typeof(ov_available_devices_t)); + IntPtr devices_ptr = Marshal.AllocHGlobal(l); + HandleException.handler( + NativeMethods.ov_core_get_available_devices(m_ptr, devices_ptr)); + + var temp1 = Marshal.PtrToStructure(devices_ptr, typeof(ov_available_devices_t)); + + ov_available_devices_t devices_s = (ov_available_devices_t)temp1; + IntPtr[] devices_ptrs = new IntPtr[devices_s.size]; + Marshal.Copy(devices_s.devices, devices_ptrs, 0, (int)devices_s.size); + List devices = new List(); + for (int i = 0; i < (int)devices_s.size; ++i) + { + devices.Add(Marshal.PtrToStringAnsi(devices_ptrs[i])); + } + NativeMethods.ov_available_devices_free(devices_ptr); + return devices; + } + } +} + diff --git a/modules/csharp_api/csharp/core/dimension.cs b/modules/csharp_api/csharp/core/dimension.cs new file mode 100644 index 000000000..afe6176ff --- /dev/null +++ b/modules/csharp_api/csharp/core/dimension.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using static OpenVinoSharp.Ov; +using ov_dimension = OpenVinoSharp.Ov.ov_dimension; + +namespace OpenVinoSharp +{ + /// + /// Class representing a dimension, which may be dynamic (undetermined until runtime), + /// in a shape or shape-like object. + /// + /// Static dimensions may be implicitly converted from value_type. + /// A dynamic dimension is constructed with Dimension() or Dimension::dynamic(). + public class Dimension + { + /// + /// The ov_dimension struct. + /// + ov_dimension m_dimension; + /// + /// Construct a static dimension. + /// + /// Value of the dimension. + public Dimension(long dimension) + { + m_dimension.min = dimension; + m_dimension.max = dimension; + } + /// + /// Construct a dynamic dimension with ov_dimension struct. + /// + /// The ov_dimension struct. 
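+        /// <example>
+        /// A minimal sketch of constructing dimensions with this class (the values are illustrative):
+        /// <code>
+        /// Dimension width = new Dimension(640);   // static dimension
+        /// Dimension batch = new Dimension(1, 8);  // dynamic dimension bounded to [1, 8]
+        /// bool dynamic = batch.is_dynamic();
+        /// </code>
+        /// </example>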
+ public Dimension(ov_dimension dimension) + { + m_dimension = dimension; + } + /// + /// Construct a dynamic dimension with bounded range + /// + /// The lower inclusive limit for the dimension + /// The upper inclusive limit for the dimension + public Dimension(long min_dimension, long max_dimension) + { + m_dimension.min = min_dimension; + m_dimension.max = max_dimension; + } + /// + /// Get ov_dimension struct. + /// + /// Return ov_dimension struct. + public ov_dimension get_dimension() + { + return m_dimension; + } + /// + /// Get max. + /// + /// Dimension max. + public long get_max() + { + return m_dimension.max; + } + + /// + /// Get min. + /// + /// Dimension min. + public long get_min() + { + return m_dimension.min; + } + /// + /// Check this dimension whether is dynamic + /// + /// Boolean, true is dynamic and false is static. + public bool is_dynamic() + { + return NativeMethods.ov_dimension_is_dynamic(m_dimension); + } + } +} diff --git a/modules/csharp_api/csharp/core/infer_request.cs b/modules/csharp_api/csharp/core/infer_request.cs new file mode 100644 index 000000000..09465db8d --- /dev/null +++ b/modules/csharp_api/csharp/core/infer_request.cs @@ -0,0 +1,383 @@ +using OpenCvSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Numerics; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using static OpenVinoSharp.Ov; + +namespace OpenVinoSharp +{ + /// + /// This is a class of infer request that can be run in asynchronous or synchronous manners. + /// + /// ov_runtime_c#_api + public class InferRequest + { + /// + /// [private]InferRequest class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]InferRequest class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Constructs InferRequest from the initialized IntPtr. + /// + /// + public InferRequest(IntPtr ptr) + { + this.m_ptr = ptr; + } + /// + /// InferRequest's destructor + /// + ~InferRequest() + { + dispose(); + } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_core_free(m_ptr); + + m_ptr = IntPtr.Zero; + } + /// + /// Sets an input/output tensor to infer on. + /// + /// Name of the input or output tensor. + /// Reference to the tensor. The element_type and shape of the tensor must match + /// the model's input/output element_type and size. + public void set_tensor(string tensor_name,Tensor tensor) + { + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor( + m_ptr, ref c_tensor_name[0], tensor.Ptr)); + } + /// + /// Sets an input/output tensor to infer. + /// + /// Node of the input or output tensor. + /// Reference to a tensor. The element_type and shape of a tensor must match + /// the model's input/output element_type and size. + public void set_tensor(Node node, Tensor tensor) + { + if (node.node_type == Node.NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_const_port( + m_ptr, node.Ptr, tensor.Ptr)); + } + else { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_port( + m_ptr, node.Ptr, tensor.Ptr)); + } + } + /// + /// Sets an input/output tensor to infer. + /// + /// + /// Port of the input or output tensor. 
Use the following methods to get the ports: + /// - Model.input() + /// - Model.const_input() + /// - Model.inputs() + /// - Model.const_inputs() + /// - Model.output() + /// - Model.const_output() + /// - Model.outputs() + /// - Model.const_outputs() + /// - CompiledModel.input() + /// - CompiledModel.const_input() + /// - CompiledModel.inputs() + /// - CompiledModel.const_inputs() + /// - CompiledModel.output() + /// - CompiledModel.const_output() + /// - CompiledModel.outputs() + /// - CompiledModel.const_outputs() + /// + /// Reference to a tensor. The element_type and shape of a tensor must match + /// the model's input/output element_type and size. + public void set_tensor(Output port, Tensor tensor) + { + if (port.get_node().node_type == Node.NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_const_port( + m_ptr, port.get_node().Ptr, tensor.Ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_port( + m_ptr, port.get_node().Ptr, tensor.Ptr)); + } + } + + /// + /// Sets an input tensor to infer. + /// + /// Index of the input tensor. If @p idx is greater than the number of model inputs, + /// an exception is thrown. + /// Reference to the tensor. The element_type and shape of the tensor must match + /// the model's input/output element_type and size. + public void set_input_tensor(ulong index, Tensor tensor) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_input_tensor_by_index( + m_ptr, index, tensor.Ptr)); + } + + /// + /// Sets an input tensor to infer models with single input. + /// + /// If model has several inputs, an exception is thrown. + /// Reference to the input tensor. + public void set_input_tensor(Tensor tensor) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_input_tensor( + m_ptr, tensor.Ptr)); + } + /// + /// Sets an output tensor to infer. + /// Index of the input preserved accross Model, CompiledModel, and InferRequest. + /// + /// Index of the output tensor. + /// Reference to the output tensor. The type of the tensor must match the model + /// output element type and shape. + public void set_output_tensor(ulong index, Tensor tensor) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_output_tensor_by_index( + m_ptr, index, tensor.Ptr)); + } + /// + /// Sets an output tensor to infer models with single output. + /// + /// If model has several outputs, an exception is thrown. + /// Reference to the output tensor. + public void set_output_tensor(Tensor tensor) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_output_tensor( + m_ptr, tensor.Ptr)); + } + + + + /// + /// Gets an input/output tensor for inference by tensor name. + /// + /// Name of a tensor to get. + /// The tensor with name @p tensor_name. If the tensor is not found, an exception is thrown. + public Tensor get_tensor(string tensor_name) + { + IntPtr tensor_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_infer_request_get_tensor(m_ptr, ref c_tensor_name[0], ref tensor_ptr)); + return new Tensor(tensor_ptr); + } + + /// + /// Gets an input/output tensor for inference by node. + /// + /// If the tensor with the specified @n node is not found, an exception is thrown. + /// Node of the tensor to get. + /// Tensor for the node @n node. 
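+        /// <example>
+        /// A minimal sketch, assuming an existing compiled_model and infer_request for a single-input model:
+        /// <code>
+        /// Node input_port = compiled_model.get_input();
+        /// Tensor input_tensor = infer_request.get_tensor(input_port);
+        /// </code>
+        /// </example>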
+ public Tensor get_tensor(Node node) + { + IntPtr tensor_ptr = IntPtr.Zero; + ExceptionStatus status; + if (node.node_type == Node.NodeType.e_const) + { + status = NativeMethods.ov_infer_request_get_tensor_by_const_port( + m_ptr, node.Ptr, ref tensor_ptr); + } + else + { + status = NativeMethods.ov_infer_request_get_tensor_by_port( + m_ptr, node.Ptr, ref tensor_ptr); + } + + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("set_tensor get_tensor error : " + status.ToString()); + } + return new Tensor(tensor_ptr); + } + + /// + /// Gets an input/output tensor for inference. + /// + /// If the tensor with the specified @p port is not found, an exception is thrown. + /// Port of the tensor to get. + /// Tensor for the port @p port. + public Tensor get_tensor(Output port) + { + IntPtr tensor_ptr = IntPtr.Zero; + if (port.get_node().node_type == Node.NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_infer_request_get_tensor_by_const_port( + m_ptr, port.get_node().Ptr, ref tensor_ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_infer_request_get_tensor_by_port( + m_ptr, port.get_node().Ptr, ref tensor_ptr)); + } + return new Tensor(tensor_ptr); + } + + /// + /// Gets an input tensor for inference. + /// + /// Index of the tensor to get. + /// Tensor with the input index @p idx. If the tensor with the specified @p idx is not found, + /// an exception is thrown. + public Tensor get_input_tensor(ulong index) + { + IntPtr tensor_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_infer_request_get_input_tensor_by_index( + m_ptr, index, ref tensor_ptr)); + return new Tensor(tensor_ptr); + } + + /// + /// Gets an input tensor for inference. + /// + /// The input tensor for the model. If model has several inputs, an exception is thrown. + public Tensor get_input_tensor() + { + IntPtr tensor_ptr = IntPtr.Zero; + + HandleException.handler( + NativeMethods.ov_infer_request_get_input_tensor( + m_ptr, ref tensor_ptr)); + return new Tensor(tensor_ptr); + } + + /// + /// Gets an output tensor for inference. + /// + /// Index of the tensor to get. + /// Tensor with the output index @p idx. If the tensor with the specified @p idx is not found, + /// an exception is thrown. + public Tensor get_output_tensor(ulong index) + { + IntPtr tensor_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_infer_request_get_output_tensor_by_index( + m_ptr, index, ref tensor_ptr)); + return new Tensor(tensor_ptr); + } + + /// + /// Gets an output tensor for inference. + /// + /// Output tensor for the model. If model has several outputs, an exception is thrown. + public Tensor get_output_tensor() + { + IntPtr tensor_ptr = IntPtr.Zero; + + HandleException.handler( + NativeMethods.ov_infer_request_get_output_tensor( + m_ptr, ref tensor_ptr)); + return new Tensor(tensor_ptr); + } + /// + /// Infers specified input(s) in synchronous mode. + /// + /// + /// It blocks all methods of InferRequest while request is ongoing (running or waiting in a queue). + /// Calling any method leads to throwning the ov::Busy exception. + /// + public void infer() + { + HandleException.handler( + NativeMethods.ov_infer_request_infer(m_ptr)); + } + + /// + /// Cancels inference request. + /// + public void cancel() + { + HandleException.handler( + NativeMethods.ov_infer_request_cancel(m_ptr)); + } + /// + /// Starts inference of specified input(s) in asynchronous mode. + /// + /// + /// It returns immediately. Inference starts also immediately. 
+ /// Calling any method while the request in a running state leads to throwning the ov::Busy exception. + /// + public void start_async() + { + HandleException.handler( + NativeMethods.ov_infer_request_start_async(m_ptr)); + } + /// + /// Waits for the result to become available. Blocks until the result becomes available. + /// + public void wait() + { + HandleException.handler( + NativeMethods.ov_infer_request_wait(m_ptr)); + } + + /// + /// Waits for the result to become available. Blocks until the specified timeout has elapsed or the result + /// becomes available, whichever comes first. + /// + /// Maximum duration, in milliseconds, to block for. + /// True if inference request is ready and false, otherwise. + public bool wait_for(long timeout) + { + HandleException.handler( + NativeMethods.ov_infer_request_wait_for(m_ptr, timeout)); + return true; + } + + /// + /// Queries performance measures per layer to identify the most time consuming operation. + /// + /// Not all plugins provide meaningful data. + /// List of profiling information for operations in a model. + public List get_profiling_info() + { + ov_profiling_info_list profiling_info_list = new ov_profiling_info_list(); + HandleException.handler( + NativeMethods.ov_infer_request_get_profiling_info(m_ptr, ref profiling_info_list)); + IntPtr[] profiling_infos_ptr = new IntPtr[profiling_info_list.size]; + Marshal.Copy(profiling_info_list.profiling_infos, profiling_infos_ptr, 0, (int)profiling_info_list.size); + List profiling_infos = new List(); + for (int i = 0; i < (int)profiling_info_list.size; ++i) + { + var temp = Marshal.PtrToStructure(profiling_infos_ptr[i], typeof(Ov.ProfilingInfo)); + Ov.ProfilingInfo profiling_info = (Ov.ProfilingInfo)temp; + profiling_infos.Add(profiling_info); + } + return profiling_infos; + } + } +} diff --git a/modules/csharp_api/csharp/core/layout.cs b/modules/csharp_api/csharp/core/layout.cs new file mode 100644 index 000000000..df3679be5 --- /dev/null +++ b/modules/csharp_api/csharp/core/layout.cs @@ -0,0 +1,94 @@ +using System; +using System.Collections.Generic; +using System.Drawing.Imaging; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// ov::Layout represents the text information of tensor's dimensions/axes. E.g. layout `NCHW` means that 4D + /// tensor `{-1, 3, 480, 640}` will have: + /// - 0: `N = -1`: batch dimension is dynamic + /// - 1: `C = 3`: number of channels is '3' + /// - 2: `H = 480`: image height is 480 + /// - 3: `W = 640`: image width is 640 + /// + /// + /// `ov::Layout` can be specified for: + /// - Preprocessing purposes. E.g. + /// - To apply normalization (means/scales) it is usually required to set 'C' dimension in a layout. + /// - To resize the image to specified width/height it is needed to set 'H' and 'W' dimensions in a layout + /// - To transpose image - source and target layout can be set (see + /// `ov::preprocess::PreProcessSteps::convert_layout`) + /// - To set/get model's batch (see `ov::get_batch`/`ov::set_batch') it is required in general to specify 'N' dimension + /// in layout for appropriate inputs + /// + public class Layout + { + /// + /// [private]Layout class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Layout class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Constructs a Layout with static or dynamic layout information based on string representation. 
+ /// + /// + /// The string used to construct Layout from. + /// The string representation can be in the following form: + /// - can define order and meaning for dimensions "NCHW" + /// - partial layout specialization: + /// - "NC?" defines 3 dimensional layout, first two NC, 3rd one is not defined + /// - "N...C" defines layout with dynamic rank where 1st dimension is N, last one is C + /// - "NC..." defines layout with dynamic rank where first two are NC, others are not + /// defined + /// - only order of dimensions "adbc" (0312) + /// - Advanced syntax can be used for multi-character names like "[N,C,H,W,...,CustomName]" + /// + public Layout(string layout_desc) + { + sbyte[] c_layout_desc = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(layout_desc)); + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_layout_create(ref c_layout_desc[0], ref m_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("Layout init error : {0}!", status.ToString()); + } + } + + /// + /// Default deconstruction + /// + ~Layout() + { + dispose(); + } + + /// + /// Release unmanaged resources. + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_layout_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// String representation of Layout. + /// + /// String representation of Layout. + public string to_string() + { + return NativeMethods.ov_layout_to_string(m_ptr); + } + } +} diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs new file mode 100644 index 000000000..8d99617ee --- /dev/null +++ b/modules/csharp_api/csharp/core/model.cs @@ -0,0 +1,514 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using System.Xml.Linq; +using static OpenCvSharp.FileStorage; + +namespace OpenVinoSharp +{ + /// + /// A user-defined model + /// + public class Model + { + /// + /// [private]Model class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Model class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + /// + /// Default Constructor + /// + /// Model pointer. + public Model(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("Model init error : ptr is null!"); + return; + } + Ptr = ptr; + } + /// + /// Model's destructor + /// + ~Model() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_core_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Gets the friendly name for a model. + /// + /// The friendly name for a model. + public string get_friendly_name() + { + + IntPtr s_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_get_friendly_name(m_ptr, ref s_ptr)); + string ss = Marshal.PtrToStringAnsi(s_ptr); + + return ss; + } + /// + /// Get single input port of model, which only support single input model. + /// + /// The input port of model. + public Node get_input() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_input(m_ptr, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_nomal); + } + + /// + /// Get an input port of model by name. + /// + /// input tensor name (string). + /// The input port of model. 
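A short sketch of the Layout API described above; the layout strings follow the syntax documented in the constructor remarks, and nothing here is specific to a particular model.

```csharp
// Construct layouts from their string form and print them back.
Layout nchw = new Layout("NCHW");      // fully defined 4D layout
Layout partial = new Layout("N...C");  // dynamic rank: first dim N, last dim C
Console.WriteLine(nchw.to_string());
Console.WriteLine(partial.to_string());
nchw.dispose();
partial.dispose();
```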
+ public Node get_input(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_model_input_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_nomal); + } + + /// + /// Get an input port of model by port index. + /// + /// input tensor index. + /// The input port of model. + public Node get_input(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_input_by_index(m_ptr, index, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_nomal); + } + + /// + /// Get an single output port of model, which only support single output model. + /// + /// The output port of model. + public Node get_output() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_output(m_ptr, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_nomal); + } + /// + /// Get an output port of model by name. + /// + /// output tensor name (string). + /// The output port of model. + public Node get_output(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_model_output_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_nomal); + } + /// + /// Get an output port of model by port index. + /// + /// input tensor index. + /// The output port of model. + public Node get_output(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_output_by_index(m_ptr, index, ref port_ptr)); + + return new Node(port_ptr, Node.NodeType.e_nomal); + } + /// + /// Get a const single input port of model, which only support single input model. + /// + /// The const input port of model. + public Node get_const_input() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_const_input(m_ptr, ref port_ptr)); + return new Node(port_ptr,Node.NodeType.e_const); + } + /// + /// Get a const input port of model by name. + /// + /// input tensor name (string). + /// The const input port of model. + public Node get_const_input(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_model_const_input_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a const input port of model by port index. + /// + /// input tensor index. + /// The const input port of model. + public Node get_const_input(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_const_input_by_index(m_ptr, index, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a single const output port of model, which only support single output model.. + /// + /// The const output port of model. + public Node get_const_output() + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_const_output(m_ptr, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a const output port of model by port index. + /// + /// output tensor name (string). + /// The const output port of model. 
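A minimal sketch of the port getters: `model` is assumed to be a `Model` instance created elsewhere, and `"images"` is a placeholder tensor name.

```csharp
// Query ports of the model by position, by name, or by index.
Console.WriteLine(model.get_friendly_name());
Node single_input = model.get_input();          // single-input models
Node named_input  = model.get_input("images");  // by tensor name (placeholder)
Node first_output = model.get_output(0);        // by port index
```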
+ public Node get_const_output(string tensor_name) + { + IntPtr port_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + HandleException.handler( + NativeMethods.ov_model_const_output_by_name(m_ptr, ref c_tensor_name[0], ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + /// + /// Get a const output port of model by name. + /// + /// output tensor index. + /// The const output port of model. + public Node get_const_output(ulong index) + { + IntPtr port_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_model_const_output_by_index(m_ptr, index, ref port_ptr)); + return new Node(port_ptr, Node.NodeType.e_const); + } + + /// + /// Get single input of model, which only support single input model. + /// + /// The input of model. + public Input input() + { + Node node = get_input(); + return new Input(node, 0); + } + /// + /// Get an input of model by port index. + /// + /// input tensor index. + /// The input of model. + public Input input(ulong index) + { + Node node = get_input(index); + return new Input(node, index); + } + /// + /// Get an input of model by name. + /// + /// input tensor name (string). + /// The input of model. + public Input input(string tensor_name) + { + Node node = get_input(tensor_name); + return new Input(node, 0); + } + + /// + /// Get single const input of model, which only support single input model. + /// + /// The const input of model. + public Input const_input() + { + Node node = get_const_input(); + return new Input(node, 0); + } + /// + /// Get an const input of model by port index. + /// + /// input tensor index. + /// The const input of model. + public Input const_input(ulong index) + { + Node node = get_const_input(index); + return new Input(node, index); + } + /// + /// Get an const input of model by name. + /// + /// input tensor name (string). + /// The const input of model. + public Input const_input(string tensor_name) + { + Node node = get_const_input(tensor_name); + return new Input(node, 0); + } + + + + /// + /// Get single input of model, which only support single input model. + /// + /// The input of model. + public Output output() + { + Node node = get_output(); + return new Output(node, 0); + } + /// + /// Get an output of model by port index. + /// + /// output tensor index. + /// The output of model. + public Output output(ulong index) + { + Node node = get_output(index); + return new Output(node, index); + } + /// + /// Get an output of model by name. + /// + /// output tensor name (string). + /// The output of model. + public Output output(string tensor_name) + { + Node node = get_output(tensor_name); + return new Output(node, 0); + } + + /// + /// Get single const output of model, which only support single output model. + /// + /// The const output of model. + public Output const_output() + { + Node node = get_const_output(); + return new Output(node, 0); + } + /// + /// Get an const output of model by port index. + /// + /// output tensor index. + /// The const output of model. + public Output const_output(ulong index) + { + Node node = get_const_output(index); + return new Output(node, index); + } + /// + /// Get an const output of model by name. + /// + /// output tensor name (string). + /// The const output of model. + public Output const_output(string tensor_name) + { + Node node = get_const_output(tensor_name); + return new Output(node, 0); + } + /// + /// Get the input size of model. + /// + /// The input size. 
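The `input()`/`output()` wrappers below return `Input`/`Output` handles that pair a port `Node` with its index. A minimal sketch, assuming the same `model` instance:

```csharp
// Inspect port handles through the Input/Output wrappers.
Input  in0  = model.input(0);
Output out0 = model.output(0);
Console.WriteLine(in0.get_any_name() + " : index " + in0.get_index());
Console.WriteLine(out0.get_any_name() + " : index " + out0.get_index());
```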
+ public ulong get_inputs_size() + { + ulong input_size = 0; + HandleException.handler( + NativeMethods.ov_model_inputs_size(m_ptr, ref input_size)); + return input_size; + } + /// + /// Get the output size of model. + /// + /// The output size. + public ulong get_outputs_size() + { + ulong output_size = 0; + HandleException.handler( + NativeMethods.ov_model_outputs_size(m_ptr, ref output_size)); + return output_size; + } + + /// + /// Get all input of model. + /// + /// All input of model. + public List inputs() + { + ulong input_size = get_inputs_size(); + List inputs = new List(); + for (ulong index = 0; index < input_size; ++index) + { + inputs.Add(input(index)); + } + return inputs; + } + /// + /// Get all output of model + /// + /// All output of model + public List outputs() + { + ulong output_size = get_outputs_size(); + List outputs = new List(); + for (ulong index = 0; index < output_size; ++index) + { + outputs.Add(output(index)); + } + return outputs; + } + + /// + /// Get all const input of model. + /// + /// All input of model. + public List const_inputs() + { + ulong input_size = get_inputs_size(); + List inputs = new List(); + for (ulong index = 0; index < input_size; ++index) + { + inputs.Add(const_input(index)); + } + return inputs; + } + + /// + /// Get all const output of model + /// + /// All output of model + public List const_outputs() + { + ulong output_size = get_outputs_size(); + List outputs = new List(); + for (ulong index = 0; index < output_size; ++index) + { + outputs.Add(const_output(index)); + } + return outputs; + } + /// + /// The ops defined in the model is dynamic shape. + /// + /// true if any of the ops defined in the model is dynamic shape.. + public bool is_dynamic() + { + return NativeMethods.ov_model_is_dynamic(m_ptr); + } + + + /// + /// Do reshape in model with partial shape for a specified name. + /// + /// The list of input tensor names and PartialShape. + public void reshape(Dictionary partial_shapes) + { + if (1 != partial_shapes.Count) + { + IntPtr[] tensor_names_ptr = new IntPtr[partial_shapes.Count]; + Ov.ov_partial_shape[] shapes = new Ov.ov_partial_shape[partial_shapes.Count]; + int i = 0; + foreach (var partial_shape in partial_shapes) + { + IntPtr p = Marshal.StringToHGlobalAnsi(partial_shape.Key); + tensor_names_ptr[i] = p; + shapes[i] = partial_shape.Value.get_partial_shape(); + } + HandleException.handler( + NativeMethods.ov_model_reshape(m_ptr, tensor_names_ptr, + ref shapes[0], (ulong)partial_shapes.Count)); + } + else + { + foreach (var partial_shape in partial_shapes) + { + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(partial_shape.Key)); + Ov.ov_partial_shape shape = partial_shape.Value.get_partial_shape(); + HandleException.handler( + NativeMethods.ov_model_reshape_input_by_name(m_ptr, ref c_tensor_name[0], + shape)); + } + + } + } + /// + /// Do reshape in model for one node(port 0). + /// + /// A PartialShape. + public void reshape(PartialShape partial_shape) + { + HandleException.handler( + NativeMethods.ov_model_reshape_single_input(m_ptr, partial_shape.get_partial_shape())); + } + /// + /// Do reshape in model with a list of (port id, partial shape). + /// + /// The list of input port id and PartialShape. 
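A minimal sketch combining the port enumeration and the name-keyed reshape overload: all ports are listed, then one named input is fixed to a static 1x3x640x640 shape. `"images"` is a placeholder; substitute the model's real input name.

```csharp
// Enumerate ports, then reshape one input by tensor name.
foreach (Input port in model.inputs())
    Console.WriteLine(port.get_any_name() + " -> " + port.get_partial_shape().to_string());

var shapes = new Dictionary<string, PartialShape>
{
    { "images", new PartialShape(new Dimension[]
        {
            new Dimension(1, 1), new Dimension(3, 3),
            new Dimension(640, 640), new Dimension(640, 640)
        }) }
};
model.reshape(shapes);
```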
+ public void reshape(Dictionary partial_shapes) + { + ulong[] indexs = new ulong[partial_shapes.Count]; + Ov.ov_partial_shape[] shapes = new Ov.ov_partial_shape[partial_shapes.Count]; + int i = 0; + foreach (var partial_shape in partial_shapes) + { + indexs[i] = partial_shape.Key; + shapes[i] = partial_shape.Value.get_partial_shape(); + } + HandleException.handler(NativeMethods.ov_model_reshape_by_port_indexes(m_ptr, ref indexs[0], + ref shapes[0], (ulong)partial_shapes.Count)); + } + /// + /// Do reshape in model with a list of (ov_output_port_t, partial shape). + /// + /// The list of input node and PartialShape. + public void reshape(Dictionary partial_shapes) + { + IntPtr[] nodes_ptr = new IntPtr[partial_shapes.Count]; + Ov.ov_partial_shape[] shapes = new Ov.ov_partial_shape[partial_shapes.Count]; + int i = 0; + foreach (var partial_shape in partial_shapes) + { + nodes_ptr[i] = partial_shape.Key.Ptr; + shapes[i] = partial_shape.Value.get_partial_shape(); + } + HandleException.handler(NativeMethods.ov_model_reshape_by_ports(m_ptr, ref nodes_ptr[0], + ref shapes[0], (ulong)partial_shapes.Count)); + } + + } + + +} + + diff --git a/modules/csharp_api/csharp/core/node.cs b/modules/csharp_api/csharp/core/node.cs new file mode 100644 index 000000000..03ce722f2 --- /dev/null +++ b/modules/csharp_api/csharp/core/node.cs @@ -0,0 +1,148 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// Nodes are the backbone of the graph of Value dataflow. Every node has + /// zero or more nodes as arguments and one value, which is either a tensor + /// or a (possibly empty) tuple of values. + /// + public class Node + { + /// + /// The node type. + /// + public enum NodeType + { + /// + /// Const type. + /// + e_const = 0, + /// + /// Nomal type. + /// + e_nomal = 1 + }; + /// + /// [private]Node class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]Node class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Specify the format type of the node. + /// + public NodeType node_type { get; set; } + + /// + /// Default Constructor. + /// + /// The pointer of node. + /// The type of node. + public Node(IntPtr ptr, NodeType type) + { + Ptr = ptr; + this.node_type = type; + } + /// + /// Default deconstruction. + /// + ~Node() { + dispose(); + } + /// + /// Release unmanaged resources. + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + if (node_type == NodeType.e_const) + { + NativeMethods.ov_output_const_port_free(m_ptr); + } + else + { + NativeMethods.ov_output_port_free(m_ptr); + } + m_ptr = IntPtr.Zero; + } + + /// + /// Get the shape. + /// + /// Returns the shape. + public Shape get_shape() + { + int l = Marshal.SizeOf(typeof(Ov.ov_shape)); + IntPtr shape_ptr = Marshal.AllocHGlobal(l); + if (node_type == NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_const_port_get_shape(m_ptr, shape_ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_port_get_shape(m_ptr, shape_ptr)); + } + + return new Shape(shape_ptr); + } + + /// + /// Get the partial shape. + /// + /// Returns the partial shape. 
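Every port getter above hands back a `Node`; a minimal sketch of inspecting and releasing one (the printed shape is only an example value):

```csharp
// Read the name and static shape of an output port, then free its native handle.
Node port = model.get_output(0);
Console.WriteLine(port.get_name() + " : " + port.get_shape().to_string());
port.dispose();
```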
+ public PartialShape get_partial_shape() + { + int l = Marshal.SizeOf(typeof(Ov.ov_partial_shape)); + IntPtr shape_ptr = Marshal.AllocHGlobal(l); + Ov.ov_partial_shape shape = new Ov.ov_partial_shape(); + HandleException.handler( + NativeMethods.ov_port_get_partial_shape(m_ptr, ref shape)); + return new PartialShape(shape_ptr); + } + + /// + /// Get the unique name of the node. + /// + /// A const reference to the node's unique name. + public string get_name() + { + ExceptionStatus status; + IntPtr s_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_port_get_any_name(m_ptr, ref s_ptr)); + string ss = Marshal.PtrToStringAnsi(s_ptr); + return ss; + } + /// + /// Checks that there is exactly one output and returns its element type. + /// + /// + /// TODO: deprecate in favor of node->get_output_element_type(0) with a suitable check in + /// the calling code, or updates to the calling code if it is making an invalid assumption + /// of only one output. + /// + /// Data type. + public OvType get_element_type() + { + uint data_type = 0; + HandleException.handler( + NativeMethods.ov_port_get_element_type(m_ptr, ref data_type)); + return new OvType((ElementType)data_type); + } + } +} diff --git a/modules/csharp_api/csharp/core/node_input.cs b/modules/csharp_api/csharp/core/node_input.cs new file mode 100644 index 000000000..c3e940d30 --- /dev/null +++ b/modules/csharp_api/csharp/core/node_input.cs @@ -0,0 +1,69 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// A handle for one of a node's inputs. + /// + public class Input + { + private Node m_node; + private ulong m_index = 0; + /// + /// Constructs a Output. + /// + /// The node for the input handle. + /// The index of the input. + public Input(Node node, ulong index) + { + m_node = node; + m_index = index; + } + /// + /// Default deconstruction. + /// + ~Input() { + dispose(); + } + /// + /// Release unmanaged resources. + /// + public void dispose() { + m_node.dispose(); + } + /// + /// Get the node referred to by this input handle. + /// + /// The ouput node + public Node get_node() { return m_node; } + /// + /// The index of the input referred to by this input handle. + /// + /// The index of the input. + public ulong get_index() { return m_index; } + /// + /// The element type of the input referred to by this input handle. + /// + /// The element type of the input. + public OvType get_element_type() { return m_node.get_element_type(); } + /// + /// The shape of the input referred to by this input handle. + /// + /// The shape of the input . + public Shape get_shape() { return m_node.get_shape(); } + /// + /// Any tensor names associated with this input + /// + /// tensor names + public string get_any_name() { return m_node.get_name(); } + /// + /// The partial shape of the input referred to by this input handle. + /// + /// The partial shape of the input + public PartialShape get_partial_shape() { return m_node.get_partial_shape(); } + } +} diff --git a/modules/csharp_api/csharp/core/node_output.cs b/modules/csharp_api/csharp/core/node_output.cs new file mode 100644 index 000000000..09e4bd80f --- /dev/null +++ b/modules/csharp_api/csharp/core/node_output.cs @@ -0,0 +1,71 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// A handle for one of a node's outputs. 
+ /// + /// + public class Output { + private Node m_node; + private ulong m_index = 0; + /// + /// Constructs a Output. + /// + /// The node for the output handle. + /// The index of the output. + public Output(Node node, ulong index) + { + m_node = node; + m_index = index; + } + /// + /// Default deconstruction. + /// + ~Output() + { + dispose(); + } + /// + /// Release unmanaged resources. + /// + public void dispose() + { + m_node.dispose(); + } + /// + /// Get the node referred to by this output handle. + /// + /// The ouput node + public Node get_node() { return m_node; } + /// + /// The index of the output referred to by this output handle. + /// + /// The index of the output. + public ulong get_index() { return m_index; } + /// + /// The element type of the output referred to by this output handle. + /// + /// The element type of the output. + public OvType get_element_type() { return m_node.get_element_type(); } + /// + /// The shape of the output referred to by this output handle. + /// + /// The shape of the output . + public Shape get_shape(){ return m_node.get_shape(); } + /// + /// Any tensor names associated with this output + /// + /// tensor names + public string get_any_name() { return m_node.get_name(); } + /// + /// The partial shape of the output referred to by this output handle. + /// + /// The partial shape of the output + public PartialShape get_partial_shape() { return m_node.get_partial_shape(); } + } +} diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs new file mode 100644 index 000000000..9cfb0368b --- /dev/null +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -0,0 +1,279 @@ +using OpenCvSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using System.Xml.Linq; +using ov_partial_shape = OpenVinoSharp.Ov.ov_partial_shape; + +namespace OpenVinoSharp +{ + /// + /// Class representing a shape that may be partially or totally dynamic. + /// + /// + /// Dynamic rank. (Informal notation: `?`) + /// Static rank, but dynamic dimensions on some or all axes. + /// (Informal notation examples: `{1,2,?,4}`, `{?,?,?}`) + /// Static rank, and static dimensions on all axes. + /// (Informal notation examples: `{1,2,3,4}`, `{6}`, `{}`) + /// + public class PartialShape + { + /// + /// [private]Core class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Core class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + + + /// + /// PartialShape rank. + /// + private Dimension rank; + + /// + /// PartialShape dimensions. + /// + private Dimension[] dimensions; + + /// + /// Constructing partial shape by pointer. 
+ /// + /// The partial shape ptr./param> + public PartialShape(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("Shape init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + var temp = Marshal.PtrToStructure(ptr, typeof(ov_partial_shape)); + ov_partial_shape shape = (ov_partial_shape)temp; + Dimension rank_tmp = new Dimension(shape.rank); + + if (!rank_tmp.is_dynamic()){ + rank = rank_tmp; + IntPtr[] d_ptr = new IntPtr[rank.get_max()]; + Marshal.Copy(shape.dims, d_ptr, 0, (int)rank.get_min()); + + dimensions = new Dimension[rank.get_min()]; + for (int i = 0; i < rank.get_min(); ++i) + { + var temp1 = Marshal.PtrToStructure(ptr, typeof(Ov.ov_dimension)); + Dimension dim = new Dimension((Ov.ov_dimension)temp1); + dimensions[i] = dim; + } + } + else { + rank = rank_tmp; + } + } + /// + /// Constructing partial shape by dimensions. + /// + /// The partial shape dimensions array. + public PartialShape(Dimension[] dimensions) + { + Ov.ov_dimension[] ds = new Ov.ov_dimension[dimensions.Length]; + for (int i = 0; i < dimensions.Length; ++i) + { + ds[i] = dimensions[i].get_dimension(); + } + HandleException.handler( + NativeMethods.ov_partial_shape_create((long)dimensions.Length, ref ds[0], m_ptr)); + this.dimensions = dimensions; + rank = new Dimension(dimensions.Length, dimensions.Length); + } + /// + /// Constructing partial shape by dimensions. + /// + /// The partial shape dimensions list. + public PartialShape(List dimensions) : this(dimensions.ToArray()) + { + } + + /// + /// Constructing dynamic partial shape by dimensions. + /// + /// The partial shape rank. + /// The partial shape dimensions array. + public PartialShape(Dimension rank, Dimension[] dimensions) + { + Ov.ov_dimension[] ds = new Ov.ov_dimension[dimensions.Length]; + for (int i = 0; i < dimensions.Length; ++i) + { + ds[i] = dimensions[i].get_dimension(); + } + HandleException.handler( + NativeMethods.ov_partial_shape_create_dynamic(rank.get_dimension(), ref ds[0], m_ptr)); + this.dimensions = dimensions; + this.rank = rank; + } + + /// + /// Constructing dynamic partial shape by dimensions. + /// + /// The partial shape rank. + /// The partial shape dimensions list. + public PartialShape(Dimension rank, List dimensions) : this(rank, dimensions.ToArray()) + { + + } + /// + /// Constructing static partial shape by dimensions. + /// + /// The partial shape rank. + /// The partial shape dimensions array. + public PartialShape(long rank, long[] dimensions) + { + HandleException.handler( + NativeMethods.ov_partial_shape_create_static(rank, ref dimensions[0], m_ptr)); + this.rank = new Dimension(rank); + for (int i = 0; i < dimensions.Length; ++i) + { + this.dimensions[i] = new Dimension(dimensions[i]); + } + } + /// + /// Constructing static partial shape by dimensions. + /// + /// The partial shape rank. + /// The partial shape dimensions list. + public PartialShape(long rank, List dimensions) : this(rank, dimensions.ToArray()) + {} + + /// + /// Constructing static partial shape by shape. + /// + /// The shape + public PartialShape(Shape shape) + { + HandleException.handler( + NativeMethods.ov_shape_to_partial_shape(shape.shape, m_ptr)); + this.rank = new Dimension(shape.Count); + for (int i = 0; i < dimensions.Length; ++i) + { + this.dimensions[i] = new Dimension(shape[i]); + } + } + + /// + /// Default deconstruction. + /// + ~PartialShape() + { + dispose(); + } + /// + /// Release unmanaged resources. 
+ /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_partial_shape_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Get ov_partial_shape + /// + /// return ov_partial_shape. + public ov_partial_shape get_partial_shape() + { + ov_partial_shape partial_shape = new ov_partial_shape(); + partial_shape.rank = rank.get_dimension(); + int l = Marshal.SizeOf(typeof(Ov.ov_dimension)); + IntPtr[] ds_ptr = new IntPtr[rank.get_max()]; + for (int i = 0; i < rank.get_max(); ++i) { + IntPtr ptr = Marshal.AllocHGlobal(l); + Marshal.StructureToPtr(dimensions[i], ptr, false); + ds_ptr[i] = ptr; + } + + IntPtr d_ptr = Marshal.AllocHGlobal((int)(l * rank.get_max())); + Marshal.Copy(ds_ptr, 0, d_ptr, (int)rank.get_max()); + partial_shape.dims = d_ptr; + return partial_shape; + } + /// + /// Get dimensions. + /// + /// Dimension[ + public Dimension[] get_dimensions() { + return dimensions; + } + + /// + /// Convert partial shape without dynamic data to a static shape. + /// + /// The shape. + public Shape to_shape() + { + IntPtr shape_ptr = IntPtr.Zero; + HandleException.handler( + NativeMethods.ov_partial_shape_to_shape(get_partial_shape(), shape_ptr)); + return new Shape(shape_ptr); + } + + /// + /// Check if this shape is static. + /// + /// A shape is considered static if it has static rank, and all dimensions of the shape + /// are static. + /// `true` if this shape is static, else `false`. + public bool is_static() { + return !is_dynamic(); + } + + /// + /// Check if this shape is dynamic. + /// + /// A shape is considered static if it has static rank, and all dimensions of the shape + /// are static. + /// `false` if this shape is static, else `true`. + public bool is_dynamic() { + return NativeMethods.ov_partial_shape_is_dynamic(get_partial_shape()); + } + + /// + /// Get partial shape string. + /// + /// + public string to_string() + { + string s = "Shape : {"; + if (rank.is_dynamic()) + { + s += "?"; + } + else + { + for (int i = 0; i < rank.get_max(); ++i) + { + if (dimensions[i].is_dynamic()) + { + s += "?,"; + } + else + { + s += dimensions[i].get_dimension().max.ToString() + ","; + } + } + } + s = s.Substring(0, s.Length - 1); + s += "}"; + return s; + } + } +} diff --git a/modules/csharp_api/csharp/core/remote_context.cs b/modules/csharp_api/csharp/core/remote_context.cs new file mode 100644 index 000000000..8fea322aa --- /dev/null +++ b/modules/csharp_api/csharp/core/remote_context.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +namespace OpenVinoSharp +{ + /// + /// This class represents an abstraction for remote (non-CPU) accelerator device-specific inference context. + /// Such context represents a scope on the device within which compiled models and remote memory tensors can exist, + /// function, and exchange data. + /// + public class RemoteContext + { + /// + /// [private]RemoteContext class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]RemoteContext class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + /// + /// Default Constructor + /// + /// RemoteContext pointer. 
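A minimal sketch of the `PartialShape`/`Dimension` API above: the batch axis is a bounded dynamic range, the remaining axes are static (min == max), and the printed output is illustrative.

```csharp
// Build a partially dynamic shape {?, 3, 640, 640} and query it.
Dimension[] dims =
{
    new Dimension(1, 8),      // batch may vary from 1 to 8
    new Dimension(3, 3),
    new Dimension(640, 640),
    new Dimension(640, 640)
};
PartialShape ps = new PartialShape(dims);
Console.WriteLine(ps.is_dynamic());   // True: the batch axis is a range
Console.WriteLine(ps.to_string());    // e.g. "Shape : {?,3,640,640}"
```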
+ public RemoteContext(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("RemoteContext init error : ptr is null!"); + return; + } + Ptr = ptr; + } + } +} diff --git a/modules/csharp_api/csharp/core/shape.cs b/modules/csharp_api/csharp/core/shape.cs new file mode 100644 index 000000000..c25e3e859 --- /dev/null +++ b/modules/csharp_api/csharp/core/shape.cs @@ -0,0 +1,127 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Security.Cryptography.X509Certificates; +using System.Text; +using System.Threading.Tasks; + +using ov_shape = OpenVinoSharp.Ov.ov_shape; + +namespace OpenVinoSharp +{ + + /// + /// Shape for a tensor. + /// + /// ov_runtime_c#_api + public class Shape : List + { + /// + /// [struct] The shape ov_shape + /// + public ov_shape shape; + /// + /// [private]Shape class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Shape class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + /// + /// Constructs Shape from the initialized IntPtr. + /// + /// Initialized IntPtr + public Shape(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("Shape init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + var temp = Marshal.PtrToStructure(ptr, typeof(ov_shape)); + shape = (ov_shape)temp; + long[] dims = shape.get_dims(); + for (int i = 0; i < shape.rank; ++i) + { + this.Add(dims[i]); + } + } + /// + /// Constructs Shape from the list. + /// + /// Initialized list + public Shape(List axis_lengths) + { + + for (int i = 0; i < axis_lengths.Count; ++i) + { + this.Add(axis_lengths[i]); + } + int l = Marshal.SizeOf(typeof(ov_shape)); + m_ptr = Marshal.AllocHGlobal(l); + HandleException.handler( + NativeMethods.ov_shape_create((long)this.Count, ref axis_lengths.ToArray()[0], m_ptr)); + var temp = Marshal.PtrToStructure(m_ptr, typeof(ov_shape)); + shape = (ov_shape)temp; + } + /// + /// Constructs Shape from the initialized array. + /// + /// Initialized array + public Shape(long[] axis_lengths) + { + + for (int i = 0; i < axis_lengths.Length; ++i) + { + this.Add(axis_lengths[i]); + } + int l = Marshal.SizeOf(typeof(ov_shape)); + m_ptr = Marshal.AllocHGlobal(l); + HandleException.handler( + NativeMethods.ov_shape_create((long)this.Count, ref axis_lengths[0], m_ptr)); + var temp = Marshal.PtrToStructure(m_ptr, typeof(ov_shape)); + shape = (ov_shape)temp; + } + /// + /// Shape's destructor + /// + ~Shape() + { + dispose(); + } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_core_free(m_ptr); + m_ptr = IntPtr.Zero; + } + /// + /// Convert shape to string. 
+ /// + /// shape string + public string to_string() + { + if (this.Count < 1) + { + return "NULL"; + } + string s = "Shape : {"; + foreach(var i in this) + { + s += i.ToString() + ", "; + } + s = s.Substring(0, s.Length - 2); + s += "}"; + return s; + } + } +} diff --git a/modules/csharp_api/csharp/core/tensor.cs b/modules/csharp_api/csharp/core/tensor.cs new file mode 100644 index 000000000..e71b8d2fa --- /dev/null +++ b/modules/csharp_api/csharp/core/tensor.cs @@ -0,0 +1,313 @@ +using OpenVinoSharp.element; +using OpenVinoSharp.preprocess; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// Tensor API holding host memory. + /// It can throw exceptions safely for the application, where it is properly handled. + /// + /// ov_runtime_c#_api + public class Tensor + { + /// + /// [private]Tensor class pointer. + /// + private IntPtr m_ptr = IntPtr.Zero; + /// + /// [public]Tensor class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Constructs Tensor from the initialized pointer. + /// + /// Tensor pointer. + public Tensor(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("Tensor init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + + /// + /// Constructs Tensor using element type ,shape and image data. + /// + /// Tensor element type + /// Tensor shape + /// Image data + public Tensor(element.Type type, Shape shape, OvMat mat) + { + int l =mat.mat_data.Length; + IntPtr data = Marshal.AllocHGlobal(l); + Marshal.Copy(mat.mat_data, 0, data, (int)mat.mat_data_size); + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)type.get_type(), shape.shape, data, ref m_ptr)); + } + /// + /// Constructs Tensor using element type and shape. Wraps allocated host memory. + /// + /// Does not perform memory allocation internally. + /// Tensor element type + /// Tensor shape + /// Pointer to pre-allocated host memory + public Tensor(element.Type type, Shape shape, IntPtr host_ptr) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)type.get_type(), shape.shape, host_ptr, ref m_ptr)); + } + + /// + /// Constructs Tensor using element type and shape. Allocate internal host storage using default allocator + /// + /// Tensor element type + /// Tensor shape + public Tensor(element.Type type, Shape shape) + { + HandleException.handler( + NativeMethods.ov_tensor_create + ((uint)type.get_type(), shape.shape, ref m_ptr)); + } + + /// + /// Default copy constructor + /// + /// other Tensor object + public Tensor(Tensor tensor) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)tensor.get_element_type().get_type(), tensor.get_shape().shape, tensor.data(), ref m_ptr)); + } + + /// + /// Tensor's destructor + /// + ~Tensor() + { + dispose(); + } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_tensor_free(m_ptr); + m_ptr = IntPtr.Zero; + } + /// + /// Set new shape for tensor, deallocate/allocate if new total size is bigger than previous one. 
+ /// + /// Memory allocation may happen + /// A new shape + public void set_shape(Shape shape) + { + HandleException.handler( + NativeMethods.ov_tensor_set_shape(m_ptr, shape.shape)); + } + + /// + /// Get tensor shape + /// + /// A tensor shape + public Shape get_shape() + { + int l = Marshal.SizeOf(typeof(Ov.ov_shape)); + IntPtr shape_ptr = Marshal.AllocHGlobal(l); + HandleException.handler( + NativeMethods.ov_tensor_get_shape(m_ptr, shape_ptr)); + + return new Shape(shape_ptr); + } + /// + /// Get tensor element type + /// + /// A tensor element type + public OvType get_element_type() + { + uint type = 100; + HandleException.handler( + NativeMethods.ov_tensor_get_element_type(m_ptr, out type)); + OvType t = new OvType((ElementType)type); + return t; + } + + /// + /// Returns the total number of elements (a product of all the dims or 1 for scalar). + /// + /// The total number of elements. + public ulong get_size() + { + ulong size = 0; + HandleException.handler( + NativeMethods.ov_tensor_get_size(m_ptr, ref size)); + return size; + } + + /// + /// Returns the size of the current Tensor in bytes. + /// + /// Tensor's size in bytes + public ulong get_byte_size() + { + ulong size = 0; + HandleException.handler( + NativeMethods.ov_tensor_get_byte_size(m_ptr, ref size)); + return size; + } + + /// + /// Copy tensor, destination tensor should have the same element type and shape + /// + /// Data type. + /// destination tensor + public void copy_to(Tensor dst) + { + ulong length = this.get_size(); + T[] data = this.get_data((int)length); + dst.set_data(data); + } + + /// + /// Provides an access to the underlaying host memory. + /// + /// A host pointer to tensor memory. + public IntPtr data() + { + IntPtr data_ptr = new IntPtr(); + HandleException.handler( + NativeMethods.ov_tensor_data(m_ptr, ref data_ptr)); + return data_ptr; + } + + /// + /// Load the specified type of data into the underlying host memory. + /// + /// data type + /// Data to be loaded. + public void set_data(T[] input_data) + { + IntPtr data_ptr = new IntPtr(); + HandleException.handler( + NativeMethods.ov_tensor_data(m_ptr, ref data_ptr)); + int length = input_data.Length; + + string t = typeof(T).ToString(); + if (t == "System.Byte") + { + float[] data = (float[])Convert.ChangeType(input_data, typeof(float[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else if (t == "System.Int32") + { + int[] data = (int[])Convert.ChangeType(input_data, typeof(int[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else if (t == "System.Int64") + { + long[] data = (long[])Convert.ChangeType(input_data, typeof(long[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else if (t == "System.Int16") + { + short[] data = (short[])Convert.ChangeType(input_data, typeof(short[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else if (t == "System.Single") + { + float[] data = (float[])Convert.ChangeType(input_data, typeof(float[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else if (t == "System.Double") + { + double[] data = (double[])Convert.ChangeType(input_data, typeof(double[])); + Marshal.Copy(data, 0, data_ptr, length); + } + else + { + Console.WriteLine("Data format error, not supported. Only double, flaot, int, long, shaort and byte data formats are supported"); + } + } + + /// + /// Read data of the specified type from the underlying host memory. + /// + /// Type of data read. + /// The length of the read data. + /// Read data. 
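A minimal sketch of the data accessors: `request` is assumed to be an existing `InferRequest`; the input tensor is filled with float data and the raw output is read back after inference.

```csharp
// Write input data into host memory, infer, and read the output back.
Tensor input = request.get_input_tensor();
float[] blob = new float[input.get_size()];
input.set_data<float>(blob);
request.infer();
Tensor output = request.get_output_tensor();
float[] result = output.get_data<float>((int)output.get_size());
```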
+ public T[] get_data(int length) + { + IntPtr data_ptr = new IntPtr(); + HandleException.handler( + NativeMethods.ov_tensor_data(m_ptr, ref data_ptr)); + string t = typeof(T).ToString(); + T[] result = new T[length]; + + if (t == "System.Byte") + { + byte[] data = new byte[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else if (t == "System.Int32") + { + int[] data = new int[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else if (t == "System.Int64") + { + long[] data = new long[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else if (t == "System.Int16") + { + short[] data = new short[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else if (t == "System.Single") + { + float[] data = new float[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else if (t == "System.Double") + { + double[] data = new double[length]; + Marshal.Copy(data_ptr, data, 0, length); + result = (T[])Convert.ChangeType(data, typeof(T[])); + return result; + } + else + { + Console.WriteLine("Data format error, not supported. Only double, flaot, int, long, shaort and byte data formats are supported"); + return result; + } + + } + + } +} diff --git a/modules/csharp_api/csharp/exception/exception.cs b/modules/csharp_api/csharp/exception/exception.cs new file mode 100644 index 000000000..fcee37d34 --- /dev/null +++ b/modules/csharp_api/csharp/exception/exception.cs @@ -0,0 +1,46 @@ +using OpenCvSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// The default exception to be thrown by OpenVINO + /// + [Serializable] + // ReSharper disable once InconsistentNaming + internal class OVException : Exception + { + /// + /// The numeric code for error status + /// + public ExceptionStatus status { get; set; } + + + /// + /// A description of the error + /// + public string err_msg { get; set; } + + + /// + /// Constructor + /// + /// The numeric code for error status + /// The source file name where error is encountered + /// A description of the error + /// The source file name where error is encountered + /// The line number in the source where error is encountered + public OVException(ExceptionStatus status, string err_msg) + : base(err_msg) + { + this.status = status; + this.err_msg = err_msg; + } + + } +} diff --git a/modules/csharp_api/csharp/exception/handle_exception.cs b/modules/csharp_api/csharp/exception/handle_exception.cs new file mode 100644 index 000000000..83eccefcc --- /dev/null +++ b/modules/csharp_api/csharp/exception/handle_exception.cs @@ -0,0 +1,236 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// OpenVINO C API Return value anomaly detection handle + /// + static class HandleException + { + /// + /// Check if there are any abnormalities in the return value, and if so, return the + /// corresponding exceptions according to the abnormal value + /// + /// + public static void handler(ExceptionStatus status) { + if (ExceptionStatus.OK == 
status) { + return; + } + else if (ExceptionStatus.GENERAL_ERROR == status) + { + general_error(); + } + else if (ExceptionStatus.NOT_IMPLEMENTED == status) + { + not_implemented(); + } + else if (ExceptionStatus.NETWORK_NOT_LOADED == status) { + network_not_loaded(); + } + else if (ExceptionStatus.PARAMETER_MISMATCH == status) + { + parameter_mismatch(); + } + else if (ExceptionStatus.NOT_FOUND == status) + { + not_found(); + } + else if (ExceptionStatus.OUT_OF_BOUNDS == status) + { + out_of_bounds(); + } + else if (ExceptionStatus.UNEXPECTED == status) + { + unexpection(); + } + else if (ExceptionStatus.REQUEST_BUSY == status) + { + request_busy(); + } else if (ExceptionStatus.RESULT_NOT_READY == status) { + result_not_ready(); + } + else if (ExceptionStatus.NOT_ALLOCATED == status) + { + not_allocated(); + } + else if (ExceptionStatus.INFER_NOT_STARTED == status) + { + infer_not_started(); + } + else if (ExceptionStatus.NETWORK_NOT_READ == status) + { + netword_not_read(); + } + else if (ExceptionStatus.INFER_CANCELLED == status) + { + infer_cancelled(); + } + else if (ExceptionStatus.INVALID_C_PARAM == status) + { + invalid_c_param(); + } + else if (ExceptionStatus.UNKNOWN_C_ERROR == status) + { + unknown_c_error(); + } + else if (ExceptionStatus.NOT_IMPLEMENT_C_METHOD == status) + { + not_implement_c_method(); + } + else if (ExceptionStatus.UNKNOW_EXCEPTION == status) + { + unknown_exception(); + } + + } + /// + /// Throw GENERAL_ERROR OpenVINOException. + /// + /// general error! + private static void general_error() { + throw new OVException(ExceptionStatus.GENERAL_ERROR, "general error!"); + } + /// + /// Throw NOT_IMPLEMENTED OpenVINOException. + /// + /// not implemented! + private static void not_implemented() + { + throw new OVException(ExceptionStatus.NOT_IMPLEMENTED, "not implemented!"); + } + + /// + /// Throw NETWORK_NOT_LOADED OpenVINOException. + /// + /// network not loaded! + private static void network_not_loaded() + { + throw new OVException(ExceptionStatus.NETWORK_NOT_LOADED, "network not loaded!"); + } + + + /// + /// Throw PARAMETER_MISMATCH OpenVINOException. + /// + /// parameter mismatch! + private static void parameter_mismatch() + { + throw new OVException(ExceptionStatus.PARAMETER_MISMATCH, "parameter mismatch!"); + } + + /// + /// Throw NOT_FOUND OpenVINOException. + /// + /// not found! + private static void not_found() + { + throw new OVException(ExceptionStatus.NOT_FOUND, "not found!"); + } + + /// + /// Throw OUT_OF_BOUNDS OpenVINOException. + /// + /// out of bounds! + private static void out_of_bounds() + { + throw new OVException(ExceptionStatus.OUT_OF_BOUNDS, "out of bounds!"); + } + + + /// + /// Throw UNEXPECTED OpenVINOException. + /// + /// unexpection! + private static void unexpection() + { + throw new OVException(ExceptionStatus.UNEXPECTED, "unexpection!"); + } + + + + /// + /// Throw REQUEST_BUSY OpenVINOException. + /// + /// request busy! + private static void request_busy() + { + throw new OVException(ExceptionStatus.REQUEST_BUSY, "request busy!"); + } + /// + /// Throw RESULT_NOT_READY OpenVINOException. + /// + /// result not ready! + private static void result_not_ready() + { + throw new OVException(ExceptionStatus.RESULT_NOT_READY, "result not ready!"); + } + /// + /// Throw OpenVINOException. + /// + /// not allocated! + private static void not_allocated() + { + throw new OVException(ExceptionStatus.NOT_ALLOCATED, "not allocated!"); + } + /// + /// Throw INFER_NOT_STARTED OpenVINOException. + /// + /// infer not started! 
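Because every native call is routed through `HandleException.handler`, a failing status surfaces as a thrown exception. A minimal caller-side sketch (note that `OVException` is internal to the assembly, so application code typically catches the base `Exception`):

```csharp
// Any non-OK status from the C API is rethrown and can be handled here.
try
{
    request.infer();
}
catch (Exception e)
{
    Console.WriteLine("OpenVINO call failed: " + e.Message);
}
```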
+ private static void infer_not_started() + { + throw new OVException(ExceptionStatus.INFER_NOT_STARTED, "infer not started!"); + } + /// + /// Throw NETWORK_NOT_READ OpenVINOException. + /// + /// netword not read! + private static void netword_not_read() + { + throw new OVException(ExceptionStatus.NETWORK_NOT_READ, "netword not read!"); + } + /// + /// Throw INFER_CANCELLED OpenVINOException. + /// + /// infer cancelled! + private static void infer_cancelled() + { + throw new OVException(ExceptionStatus.INFER_CANCELLED, "infer cancelled!"); + } + /// + /// Throw INVALID_C_PARAM OpenVINOException. + /// + /// invalid c param! + private static void invalid_c_param() + { + throw new OVException(ExceptionStatus.INVALID_C_PARAM, "invalid c param!"); + } + /// + /// Throw UNKNOWN_C_ERROR OpenVINOException. + /// + /// unknown c error! + private static void unknown_c_error() + { + throw new OVException(ExceptionStatus.UNKNOWN_C_ERROR, "unknown c error!"); + } + /// + /// Throw NOT_IMPLEMENT_C_METHOD OpenVINOException. + /// + /// not implement c method! + private static void not_implement_c_method() + { + throw new OVException(ExceptionStatus.NOT_IMPLEMENT_C_METHOD, "not implement c method!"); + } + /// + /// Throw UNKNOW_EXCEPTION OpenVINOException. + /// + /// unknown exception! + private static void unknown_exception() + { + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, "unknown exception!"); + } + } +} diff --git a/modules/csharp_api/csharp/model/Yolov8.cs b/modules/csharp_api/csharp/model/Yolov8.cs new file mode 100644 index 000000000..9cefd895b --- /dev/null +++ b/modules/csharp_api/csharp/model/Yolov8.cs @@ -0,0 +1,665 @@ +using OpenCvSharp; +using OpenCvSharp.Dnn; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + + +namespace OpenVinoSharp.model.Yolov8 +{ + /// + /// Key point data + /// + public class Result + { + /// + /// Get Result Length + /// + public int length + { + get + { + return scores.Count; + } + } + + /// + /// Identification result class + /// + public List classes = new List(); + /// + /// Confidence value + /// + public List scores = new List(); + /// + /// Prediction box + /// + public List rects = new List(); + /// + /// Split Region + /// + public List masks = new List(); + /// + /// Key points of the human body + /// + public List poses = new List(); + + /// + /// object detection + /// + /// Predictiveness scores + /// Identification box + /// Identification class + public void add(float score, Rect rect, int cla) + { + scores.Add(score); + rects.Add(rect); + classes.Add(cla); + } + /// + /// 物体分割 + /// + /// Predictiveness scores + /// Identification box + /// Identification class + /// Semantic segmentation results + public void add(float score, Rect rect, int cla, Mat mask) + { + scores.Add(score); + rects.Add(rect); + classes.Add(cla); + masks.Add(mask); + } + /// + /// Key point prediction + /// + /// Predictiveness scores + /// Identification box + /// Key point data + public void add(float score, Rect rect, PoseData pose) + { + scores.Add(score); + rects.Add(rect); + poses.Add(pose); + } + } + /// + /// Human Key Point Data + /// + public struct PoseData + { + /// + /// Key point prediction score + /// + public float[] score; + /// + /// Key point prediction results. + /// + public List point; + /// + /// Default Constructor + /// + /// Key point prediction results. + /// Image scaling ratio. 
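A minimal sketch of the `Result` container defined above: scores, boxes, and class ids are kept in parallel lists, and the values used here are placeholders.

```csharp
// Collect a detection into a Result and read it back.
Result result = new Result();
result.add(0.91f, new Rect(64, 48, 120, 200), 0);
for (int i = 0; i < result.length; i++)
{
    Console.WriteLine("class " + result.classes[i] +
                      "  score " + result.scores[i].ToString("0.00") +
                      "  box " + result.rects[i]);
}
```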
+ public PoseData(float[] data, float[] scales) + { + score = new float[data.Length]; + point = new List(); + for (int i = 0; i < 17; i++) + { + Point p = new Point((int)(data[3 * i] * scales[0]), (int)(data[3 * i + 1] * scales[1])); + this.point.Add(p); + this.score[i] = data[3 * i + 2]; + } + } + /// + /// Convert PoseData to string. + /// + /// PoseData string. + public string to_string() + { + string[] point_str = new string[] { "Nose", "Left Eye", "Right Eye", "Left Ear", "Right Ear", + "Left Shoulder", "Right Shoulder", "Left Elbow", "Right Elbow", "Left Wrist", "Right Wrist", + "Left Hip", "Right Hip", "Left Knee", "Right Knee", "Left Ankle", "Right Ankle" }; + string ss = ""; + for (int i = 0; i < point.Count; i++) + { + ss += point_str[i] + ": (" + point[i].X.ToString("0") + " ," + point[i].Y.ToString("0") + " ," + score[i].ToString("0.00") + ") "; + } + return ss; + } + } + + /// + /// Yolov8 model inference result processing method. + /// + public class ResultProcess + { + /// + /// Identify Result Types + /// + public string[] class_names; + /// + /// Image information scaling ratio h, scaling ratio h, height, width + /// + public float[] scales; + /// + /// Confidence threshold + /// + public float score_threshold; + /// + /// Non maximum suppression threshold + /// + public float nms_threshold; + /// + /// Number of categories + /// + public int categ_nums = 0; + + + /// + /// SegmentationResult processing class construction + /// + /// scaling ratio h, scaling ratio h, height, width + /// score threshold + /// nms threshold + public ResultProcess(float[] scales, int categ_nums, float score_threshold = 0.3f, float nms_threshold = 0.5f) + { + this.scales = scales; + this.score_threshold = score_threshold; + this.nms_threshold = nms_threshold; + this.categ_nums = categ_nums; + } + + /// + /// Read local recognition result type file to memory + /// + /// file path + /// + /// Only the. 
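        // Illustrative note, not part of this patch: the scales array used throughout ResultProcess is
        // laid out as { x_factor, y_factor, source_height, source_width }. Predicted boxes in model-input
        // coordinates are multiplied by scales[0] / scales[1], and scales[2] / scales[3] give the size of
        // the drawing canvas. Assuming a 640x640 model input and 80 classes (common YOLOv8 defaults, not
        // stated in this file), the values could be prepared like this:
        Mat image = Cv2.ImRead(image_path);   // image_path is a hypothetical local variable
        float[] scales = new float[] { image.Cols / 640f, image.Rows / 640f, image.Rows, image.Cols };
        ResultProcess processor = new ResultProcess(scales, 80);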
txt file format is supported, and the content format for this category is as follows: + /// sea lion + /// Scottish deerhound + /// tiger cat + /// ··· + /// + public void read_class_names(string path) + { + + List str = new List(); + StreamReader sr = new StreamReader(path); + string line; + while ((line = sr.ReadLine()) != null) + { + str.Add(line); + } + + class_names = str.ToArray(); + } + + /// + /// Result process + /// + /// Model prediction output + /// Model recognition results + public KeyValuePair[] process_cls_result(float[] result) + { + List new_list = new List { }; + for (int i = 0; i < result.Length; i++) + { + new_list.Add(new float[] { (float)result[i], i }); + } + new_list.Sort((a, b) => b[0].CompareTo(a[0])); + + KeyValuePair[] cls = new KeyValuePair[10]; + for (int i = 0; i < 10; ++i) + { + cls[i] = new KeyValuePair((int)new_list[i][1], new_list[i][0]); + } + return cls; + } + + /// + /// Result drawing + /// + /// recognition result + /// source image + /// result image + public Mat draw_cls_result(KeyValuePair result, Mat image) + { + Cv2.PutText(image, class_names[result.Key] + ": " + result.Value.ToString("0.00"), + new Point(25, 30), HersheyFonts.HersheySimplex, 1, new Scalar(0, 0, 255), 2); + return image; + } + + + + /// + /// Result process + /// + /// Model prediction output + /// Model recognition results + public Result process_det_result(float[] result) + { + Mat result_data = new Mat(4 + categ_nums, 8400, MatType.CV_32F, result); + result_data = result_data.T(); + + // Storage results list + List position_boxes = new List(); + List class_ids = new List(); + List confidences = new List(); + // Preprocessing output results + for (int i = 0; i < result_data.Rows; i++) + { + Mat classes_scores = result_data.Row(i).ColRange(4, 4 + categ_nums);//GetArray(i, 5, classes_scores); + Point max_classId_point, min_classId_point; + double max_score, min_score; + // Obtain the maximum value and its position in a set of data + Cv2.MinMaxLoc(classes_scores, out min_score, out max_score, + out min_classId_point, out max_classId_point); + // Confidence level between 0 ~ 1 + // Obtain identification box information + if (max_score > 0.25) + { + float cx = result_data.At(i, 0); + float cy = result_data.At(i, 1); + float ow = result_data.At(i, 2); + float oh = result_data.At(i, 3); + int x = (int)((cx - 0.5 * ow) * this.scales[0]); + int y = (int)((cy - 0.5 * oh) * this.scales[1]); + int width = (int)(ow * this.scales[0]); + int height = (int)(oh * this.scales[1]); + Rect box = new Rect(); + box.X = x; + box.Y = y; + box.Width = width; + box.Height = height; + + position_boxes.Add(box); + class_ids.Add(max_classId_point.X); + confidences.Add((float)max_score); + } + } + + // NMS non maximum suppression + int[] indexes = new int[position_boxes.Count]; + CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); + + Result re_result = new Result(); + // + for (int i = 0; i < indexes.Length; i++) + { + int index = indexes[i]; + re_result.add(confidences[index], position_boxes[index], class_ids[index]); + } + return re_result; + } + + /// + /// Result drawing + /// + /// recognition result + /// image + /// + public Mat draw_det_result(Result result, Mat image) + { + + // Draw recognition results on the image + for (int i = 0; i < result.length; i++) + { + //Console.WriteLine(result.rects[i]); + Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); + Cv2.Rectangle(image, new Point(result.rects[i].TopLeft.X, 
result.rects[i].TopLeft.Y + 30), + new Point(result.rects[i].BottomRight.X, result.rects[i].TopLeft.Y), new Scalar(0, 255, 255), -1); + Cv2.PutText(image, class_names[ result.classes[i]] + "-" + result.scores[i].ToString("0.00"), + new Point(result.rects[i].X, result.rects[i].Y + 25), + HersheyFonts.HersheySimplex, 0.8, new Scalar(0, 0, 0), 2); + } + return image; + } + + /// + /// sigmoid + /// + /// + /// + private float sigmoid(float a) + { + float b = 1.0f / (1.0f + (float)Math.Exp(-a)); + return b; + } + + /// + /// Result process + /// + /// detection output + /// segmentation output + /// + public Result process_seg_result(float[] detect, float[] proto) + { + Mat detect_data = new Mat(36 + categ_nums, 8400, MatType.CV_32F, detect); + Mat proto_data = new Mat(32, 25600, MatType.CV_32F, proto); + detect_data = detect_data.T(); + List position_boxes = new List(); + List class_ids = new List(); + List confidences = new List(); + List masks = new List(); + for (int i = 0; i < detect_data.Rows; i++) + { + + Mat classes_scores = detect_data.Row(i).ColRange(4, 4 + categ_nums);//GetArray(i, 5, classes_scores); + Point max_classId_point, min_classId_point; + double max_score, min_score; + Cv2.MinMaxLoc(classes_scores, out min_score, out max_score, + out min_classId_point, out max_classId_point); + + if (max_score > 0.25) + { + //Console.WriteLine(max_score); + + Mat mask = detect_data.Row(i).ColRange(4 + categ_nums, categ_nums + 36); + + float cx = detect_data.At(i, 0); + float cy = detect_data.At(i, 1); + float ow = detect_data.At(i, 2); + float oh = detect_data.At(i, 3); + int x = (int)((cx - 0.5 * ow) * this.scales[0]); + int y = (int)((cy - 0.5 * oh) * this.scales[1]); + int width = (int)(ow * this.scales[0]); + int height = (int)(oh * this.scales[1]); + Rect box = new Rect(); + box.X = x; + box.Y = y; + box.Width = width; + box.Height = height; + + position_boxes.Add(box); + class_ids.Add(max_classId_point.X); + confidences.Add((float)max_score); + masks.Add(mask); + } + } + + + int[] indexes = new int[position_boxes.Count]; + CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); + + Result re_result = new Result(); // Output Result Class + // RGB images with colors + Mat rgb_mask = Mat.Zeros(new Size((int)scales[3], (int)scales[2]), MatType.CV_8UC3); + Random rd = new Random(); // Generate Random Numbers + for (int i = 0; i < indexes.Length; i++) + { + int index = indexes[i]; + // Division scope + Rect box = position_boxes[index]; + int box_x1 = Math.Max(0, box.X); + int box_y1 = Math.Max(0, box.Y); + int box_x2 = Math.Max(0, box.BottomRight.X); + int box_y2 = Math.Max(0, box.BottomRight.Y); + + // Segmentation results + Mat original_mask = masks[index] * proto_data; + for (int col = 0; col < original_mask.Cols; col++) + { + original_mask.At(0, col) = sigmoid(original_mask.At(0, col)); + } + // 1x25600 -> 160x160 Convert to original size + Mat reshape_mask = original_mask.Reshape(1, 160); + + //Console.WriteLine("m1.size = {0}", m1.Size()); + + // Split size after scaling + int mx1 = Math.Max(0, (int)((box_x1 / scales[0]) * 0.25)); + int mx2 = Math.Max(0, (int)((box_x2 / scales[0]) * 0.25)); + int my1 = Math.Max(0, (int)((box_y1 / scales[1]) * 0.25)); + int my2 = Math.Max(0, (int)((box_y2 / scales[1]) * 0.25)); + // Crop Split Region + Mat mask_roi = new Mat(reshape_mask, new OpenCvSharp.Range(my1, my2), new OpenCvSharp.Range(mx1, mx2)); + // Convert the segmented area to the actual size of the image + Mat actual_maskm = new Mat(); + 
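        // Illustrative sketch, not part of this patch: process_det_result above expects the raw YOLOv8
        // detection output flattened into a float[] of shape (4 + categ_nums) x 8400. How that buffer is
        // read from an infer request is outside this example; given such an `output` array, the typical
        // post-processing flow is:
        processor.read_class_names(label_path);            // label_path is a hypothetical label file path
        Result det = processor.process_det_result(output);
        Mat drawn = processor.draw_det_result(det, image);
        processor.print_result(det);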
Cv2.Resize(mask_roi, actual_maskm, new Size(box_x2 - box_x1, box_y2 - box_y1)); + // Binary segmentation region + for (int r = 0; r < actual_maskm.Rows; r++) + { + for (int c = 0; c < actual_maskm.Cols; c++) + { + float pv = actual_maskm.At(r, c); + if (pv > 0.5) + { + actual_maskm.At(r, c) = 1.0f; + } + else + { + actual_maskm.At(r, c) = 0.0f; + } + } + } + + // 预测 + Mat bin_mask = new Mat(); + actual_maskm = actual_maskm * 200; + actual_maskm.ConvertTo(bin_mask, MatType.CV_8UC1); + if ((box_y1 + bin_mask.Rows) >= scales[2]) + { + box_y2 = (int)scales[2] - 1; + } + if ((box_x1 + bin_mask.Cols) >= scales[3]) + { + box_x2 = (int)scales[3] - 1; + } + // Obtain segmentation area + Mat mask = Mat.Zeros(new Size((int)scales[3], (int)scales[2]), MatType.CV_8UC1); + bin_mask = new Mat(bin_mask, new OpenCvSharp.Range(0, box_y2 - box_y1), new OpenCvSharp.Range(0, box_x2 - box_x1)); + Rect roi = new Rect(box_x1, box_y1, box_x2 - box_x1, box_y2 - box_y1); + bin_mask.CopyTo(new Mat(mask, roi)); + // Color segmentation area + Cv2.Add(rgb_mask, new Scalar(rd.Next(0, 255), rd.Next(0, 255), rd.Next(0, 255)), rgb_mask, mask); + + re_result.add(confidences[index], position_boxes[index], class_ids[index], rgb_mask.Clone()); + + } + + return re_result; + } + + /// + /// Result drawing + /// + /// recognition result + /// image + /// + public Mat draw_seg_result(Result result, Mat image) + { + Mat masked_img = new Mat(); + // Draw recognition results on the image + for (int i = 0; i < result.length; i++) + { + Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); + Cv2.Rectangle(image, new Point(result.rects[i].TopLeft.X, result.rects[i].TopLeft.Y + 30), + new Point(result.rects[i].BottomRight.X, result.rects[i].TopLeft.Y), new Scalar(0, 255, 255), -1); + Cv2.PutText(image, class_names[result.classes[i]] + "-" + result.scores[i].ToString("0.00"), + new Point(result.rects[i].X, result.rects[i].Y + 25), + HersheyFonts.HersheySimplex, 0.8, new Scalar(0, 0, 0), 2); + Cv2.AddWeighted(image, 0.5, result.masks[i], 0.5, 0, masked_img); + } + return masked_img; + } + + /// + /// Result process + /// + /// Model prediction output + /// Model recognition results + public Result process_pose_result(float[] result) + { + Mat result_data = new Mat(56, 8400, MatType.CV_32F, result); + result_data = result_data.T(); + List position_boxes = new List(); + List confidences = new List(); + List pose_datas = new List(); + for (int i = 0; i < result_data.Rows; i++) + { + if (result_data.At(i, 4) > 0.25) + { + //Console.WriteLine(max_score); + float cx = result_data.At(i, 0); + float cy = result_data.At(i, 1); + float ow = result_data.At(i, 2); + float oh = result_data.At(i, 3); + int x = (int)((cx - 0.5 * ow) * this.scales[0]); + int y = (int)((cy - 0.5 * oh) * this.scales[1]); + int width = (int)(ow * this.scales[0]); + int height = (int)(oh * this.scales[1]); + Rect box = new Rect(); + box.X = x; + box.Y = y; + box.Width = width; + box.Height = height; + Mat pose_mat = result_data.Row(i).ColRange(5, 56); + float[] pose_data = new float[51]; + pose_mat.GetArray(out pose_data); + PoseData pose = new PoseData(pose_data, this.scales); + + position_boxes.Add(box); + + confidences.Add((float)result_data.At(i, 4)); + pose_datas.Add(pose); + } + } + + int[] indexes = new int[position_boxes.Count]; + CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); + + Result re_result = new Result(); + for (int i = 0; i < indexes.Length; i++) + { + int index = indexes[i]; + 
re_result.add(confidences[index], position_boxes[index], pose_datas[index]); + //Console.WriteLine("rect: {0}, score: {1}", position_boxes[index], confidences[index]); + } + return re_result; + + } + /// + /// Result drawing + /// + /// recognition result + /// image + /// + public Mat draw_pose_result(Result result, Mat image, double visual_thresh) + { + + // 将识别结果绘制到图片上 + for (int i = 0; i < result.length; i++) + { + Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); + + draw_poses(result.poses[i], ref image, visual_thresh); + } + return image; + } + /// + /// Key point result drawing + /// + /// Key point data + /// image + public void draw_poses(PoseData pose, ref Mat image, double visual_thresh) + { + // Connection point relationship + int[,] edgs = new int[17, 2] { { 0, 1 }, { 0, 2}, {1, 3}, {2, 4}, {3, 5}, {4, 6}, {5, 7}, {6, 8}, + {7, 9}, {8, 10}, {5, 11}, {6, 12}, {11, 13}, {12, 14},{13, 15 }, {14, 16 }, {11, 12 } }; + // Color Library + Scalar[] colors = new Scalar[18] { new Scalar(255, 0, 0), new Scalar(255, 85, 0), new Scalar(255, 170, 0), + new Scalar(255, 255, 0), new Scalar(170, 255, 0), new Scalar(85, 255, 0), new Scalar(0, 255, 0), + new Scalar(0, 255, 85), new Scalar(0, 255, 170), new Scalar(0, 255, 255), new Scalar(0, 170, 255), + new Scalar(0, 85, 255), new Scalar(0, 0, 255), new Scalar(85, 0, 255), new Scalar(170, 0, 255), + new Scalar(255, 0, 255), new Scalar(255, 0, 170), new Scalar(255, 0, 85) }; + // Draw Keys + for (int p = 0; p < 17; p++) + { + if (pose.score[p] < visual_thresh) + { + continue; + } + + Cv2.Circle(image, pose.point[p], 2, colors[p], -1); + //Console.WriteLine(pose.point[p]); + } + // draw + for (int p = 0; p < 17; p++) + { + if (pose.score[edgs[p, 0]] < visual_thresh || pose.score[edgs[p, 1]] < visual_thresh) + { + continue; + } + + float[] point_x = new float[] { pose.point[edgs[p, 0]].X, pose.point[edgs[p, 1]].X }; + float[] point_y = new float[] { pose.point[edgs[p, 0]].Y, pose.point[edgs[p, 1]].Y }; + + Point center_point = new Point((int)((point_x[0] + point_x[1]) / 2), (int)((point_y[0] + point_y[1]) / 2)); + double length = Math.Sqrt(Math.Pow((double)(point_x[0] - point_x[1]), 2.0) + Math.Pow((double)(point_y[0] - point_y[1]), 2.0)); + int stick_width = 2; + Size axis = new Size(length / 2, stick_width); + double angle = (Math.Atan2((double)(point_y[0] - point_y[1]), (double)(point_x[0] - point_x[1]))) * 180 / Math.PI; + Point[] polygon = Cv2.Ellipse2Poly(center_point, axis, (int)angle, 0, 360, 1); + Cv2.FillConvexPoly(image, polygon, colors[p]); + + } + } + /// + /// Print and output image classification results + /// + /// classification results + public void print_result(KeyValuePair[] result) + { + Console.WriteLine("\n Classification Top 10 result : \n"); + Console.WriteLine("classid probability"); + Console.WriteLine("------- -----------"); + for (int i = 0; i < 10; ++i) + { + Console.WriteLine("{0} {1}", result[i].Key.ToString("0"), result[i].Value.ToString("0.000000")); + } + } + /// + /// Print out image prediction results + /// + /// prediction results + public void print_result(Result result) + { + if (result.poses.Count != 0) + { + Console.WriteLine("\n Classification result : \n"); + for (int i = 0; i < result.length; ++i) + { + string ss = (i + 1).ToString() + ": 1 " + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString() + +" " + result.poses[i].to_string(); + Console.WriteLine(ss); + } + return; + } + + if (result.masks.Count != 0) + { + Console.WriteLine("\n Segmentation result 
: \n"); + for (int i = 0; i < result.length; ++i) + { + string ss = (i + 1).ToString() + ": " + result.classes[i]+ "\t" + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString(); + Console.WriteLine(ss); + } + return; + } + Console.WriteLine("\n Detection result : \n"); + for (int i = 0; i < result.length; ++i) + { + string ss = (i + 1).ToString() + ": " + result.classes[i] + "\t" + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString(); + Console.WriteLine(ss); + } + + } + + }; + +} diff --git a/modules/csharp_api/csharp/native_methods/ov_base.cs b/modules/csharp_api/csharp/native_methods/ov_base.cs new file mode 100644 index 000000000..80e115b4e --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_base.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// Introducing C API. + /// + public partial class NativeMethods + { + private const string dll_extern = "./openvino2023.0/openvino_c.dll"; + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_common.cs b/modules/csharp_api/csharp/native_methods/ov_common.cs new file mode 100644 index 000000000..ae342c292 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_common.cs @@ -0,0 +1,30 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Print the error info. + /// + /// a status code. + /// error info. + [DllImport(dll_extern, EntryPoint = "ov_get_error_info", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static string ov_get_error_info(int status); + + /// + /// free char + /// + /// The pointer to the char to free. + [DllImport(dll_extern, EntryPoint = "ov_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_free(ref char content); + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_compiled_model.cs b/modules/csharp_api/csharp/native_methods/ov_compiled_model.cs new file mode 100644 index 000000000..d10278119 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_compiled_model.cs @@ -0,0 +1,202 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Get the input size of ov_compiled_model_t. + /// + /// A pointer to the ov_compiled_model_t. + /// the compiled_model's input size. + /// + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_inputs_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_inputs_size( + IntPtr compiled_model, ref ulong size); + + /// + /// Get the single const input port of ov_compiled_model_t, which only support single input model. + /// + /// A pointer to the ov_compiled_model_t. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_compiled_model_input", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_input( + IntPtr compiled_model, ref IntPtr input_port); + + /// + /// Get a const input port of ov_compiled_model_t by port index. + /// + /// A pointer to the ov_compiled_model_t. + /// input index. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_input_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_input_by_index( + IntPtr compiled_model, + ulong index, + ref IntPtr input_port); + + /// + /// Get a const input port of ov_compiled_model_t by name. + /// + /// A pointer to the ov_compiled_model_t. + /// nput tensor name (char *). + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_input_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_input_by_name( + IntPtr compiled_model, + ref sbyte name, + ref IntPtr input_port); + + /// + /// Get the output size of ov_compiled_model_t. + /// + /// A pointer to the ov_compiled_model_t. + /// the compiled_model's output size. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_outputs_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_outputs_size( + IntPtr compiled_model, + ref ulong size); + + /// + /// Get the single const output port of ov_compiled_model_t, which only support single output model. + /// + /// A pointer to the ov_compiled_model_t. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_output", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_output( + IntPtr compiled_model, ref IntPtr output_port); + + + /// + /// Get a const output port of ov_compiled_model_t by port index. + /// + /// A pointer to the ov_compiled_model_t. + /// input index. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_output_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_output_by_index( + IntPtr compiled_model, + ulong index, + ref IntPtr output_port); + + + /// + /// Get a const output port of ov_compiled_model_t by name. + /// + /// A pointer to the ov_compiled_model_t. + /// tensor name (char *). + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_output_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_output_by_name( + IntPtr compiled_model, + ref sbyte name, + ref IntPtr output_port); + + /// + /// Gets runtime model information from a device. + /// + /// A pointer to the ov_compiled_model_t. + /// A pointer to the ov_model_t. 
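        // Illustrative sketch, not part of this patch: the port helpers above all work on opaque IntPtr
        // handles. Enumerating the input ports of a compiled model looks roughly like this (error
        // handling elided):
        ulong input_count = 0;
        NativeMethods.ov_compiled_model_inputs_size(compiled_model, ref input_count);
        for (ulong i = 0; i < input_count; ++i)
        {
            IntPtr port = IntPtr.Zero;
            NativeMethods.ov_compiled_model_input_by_index(compiled_model, i, ref port);
            // ... query the port (name, shape, element type) through the ov_node bindings
        }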
+ /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_get_runtime_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_get_runtime_model( + IntPtr compiled_model, + ref IntPtr model); + + /// + /// Creates an inference request object used to infer the compiled model. + /// + /// A pointer to the ov_compiled_model_t. + /// A pointer to the ov_infer_request_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_create_infer_request", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_create_infer_request( + IntPtr compiled_model, + ref IntPtr infer_request); + + /// + /// Sets properties for a device, acceptable keys can be found in ov_property_key_xxx. + /// + /// A pointer to the ov_compiled_model_t. + /// The property key string. + /// The property value string. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_set_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_set_property( + IntPtr compiled_model, + IntPtr property_key, + IntPtr property_value); + + /// + /// Gets properties for current compiled model. + /// + /// A pointer to the ov_compiled_model_t. + /// Property key. + /// A pointer to property value. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_get_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_get_property( + IntPtr compiled_model, + ref sbyte property_key, + ref IntPtr property_value); + + /// + /// Exports the current compiled model to an output stream `std::ostream`. + /// The exported model can also be imported via the ov::Core::import_model method. + /// + /// A pointer to the ov_compiled_model_t. + /// Path to the file. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_export_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_export_model( + IntPtr compiled_model, + ref sbyte export_model_path); + + /// + /// Release the memory allocated by ov_compiled_model_t. + /// + /// A pointer to the ov_compiled_model_t. + [DllImport(dll_extern, EntryPoint = "ov_compiled_model_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_compiled_model_free(IntPtr compiled_model); + + /// + /// Returns pointer to device-specific shared context on a remote accelerator + /// device that was used to create this CompiledModel. + /// + /// A pointer to the ov_compiled_model_t. + /// Return context. + /// Status code of the operation: OK(0) for success. 
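        // Illustrative sketch, not part of this patch: once a compiled model handle exists,
        // ov_compiled_model_create_infer_request is the entry point for inference. The handles are owned
        // by the managed wrapper classes, which are expected to release them with the matching *_free calls:
        IntPtr infer_request = IntPtr.Zero;
        NativeMethods.ov_compiled_model_create_infer_request(compiled_model, ref infer_request);
        // ... run inference, then release both handles
        NativeMethods.ov_infer_request_free(infer_request);
        NativeMethods.ov_compiled_model_free(compiled_model);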
+ [DllImport(dll_extern, EntryPoint = "ov_compiled_model_get_context", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_compiled_model_get_context( + IntPtr compiled_model, + ref IntPtr context); + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_core.cs b/modules/csharp_api/csharp/native_methods/ov_core.cs new file mode 100644 index 000000000..65e039a09 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_core.cs @@ -0,0 +1,332 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + + /// + /// Get version of OpenVINO. + /// + /// a pointer to the version + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_get_openvino_version( + IntPtr version); + + /// + /// Release the memory allocated by ov_version_t. + /// + /// A pointer to the ov_version_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_version_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_version_free( + IntPtr version); + + /// + /// Constructs OpenVINO Core instance by default. + /// See RegisterPlugins for more details. + /// + /// A pointer to the newly created ov_core_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_create", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_create( + ref IntPtr core); + + /// + /// Constructs OpenVINO Core instance using XML configuration file with devices description. + /// See RegisterPlugins for more details. + /// + /// A path to .xml file with devices to load from. + /// If XML configuration file is not specified, then default plugin.xml file will be used. + /// A pointer to the newly created ov_core_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_create_with_config", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_create_with_config( + string xml_config_file, + ref IntPtr core); + + /// + /// Release the memory allocated by ov_core_t. + /// + /// A pointer to the ov_core_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_core_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_core_free( + IntPtr core); + + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. + /// + /// A pointer to the ie_core_t instance. + /// Path to a model. + /// Path to a data file. + /// A pointer to the newly created model. + /// Status code of the operation: OK(0) for success. + /// + /// + /// For IR format (*.bin): + /// if `bin_path` is empty, will try to read a bin file with the same name as xml and + /// if the bin file with the same name is not found, will load IR without weights. 
+ /// For the following file formats the `bin_path` parameter is not used: + /// + /// ONNX format (*.onnx) + /// PDPD(*.pdmodel) + /// TF(*.pb) + /// TFLite(*.tflite) + /// + [DllImport(dll_extern, EntryPoint = "ov_core_read_model_unicode", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_read_model_unicode( + IntPtr core, + string model_path, + string bin_path, + ref IntPtr model); + + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. + /// + /// A pointer to the ie_core_t instance. + /// Path to a model. + /// Path to a data file. + /// A pointer to the newly created model. + /// Status code of the operation: OK(0) for success. + /// + /// + /// For IR format (*.bin): + /// if `bin_path` is empty, will try to read a bin file with the same name as xml and + /// if the bin file with the same name is not found, will load IR without weights. + /// For the following file formats the `bin_path` parameter is not used: + /// + /// ONNX format (*.onnx) + /// PDPD(*.pdmodel) + /// TF(*.pb) + /// TFLite(*.tflite) + /// + [DllImport(dll_extern, EntryPoint = "ov_core_read_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_read_model( + IntPtr core, + ref sbyte model_path, + ref sbyte bin_path, + ref IntPtr model); + + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. + /// + /// A pointer to the ie_core_t instance. + /// Path to a model. + /// Shared pointer to a constant tensor with weights. + /// A pointer to the newly created model. + /// + /// Reading ONNX / PDPD / TF / TFLite models does not support loading weights + /// from the @p weights tensors. + /// + /// Created model object shares the weights with the @p weights object. + /// Thus, do not create @p weights on temporary data that can be freed later, + /// since the model constant data will point to an invalid memory. + /// + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_read_model_from_memory", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_read_model_from_memory(IntPtr core, + ref sbyte model_path, + IntPtr weights, + ref IntPtr model); + + /// + /// Creates a compiled model from a source model object. Users can create + /// as many compiled models as they need and use them simultaneously + /// (up to the limitation of the hardware resources). + /// + /// A pointer to the ie_core_t instance. + /// Model object acquired from Core::read_model. + /// Name of a device to load a model to. + /// How many properties args will be passed, + /// each property contains 2 args: key and value. + /// A pointer to the newly created compiled_model. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model( + IntPtr core, + IntPtr model, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model); + + /// + /// Reads a model and creates a compiled model from the IR/ONNX/PDPD file. + /// This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow, + /// especially for cases when caching is enabled and a cached model is available. + /// + /// A pointer to the ie_core_t instance. 
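        // Illustrative sketch, not part of this patch: ov_core_read_model_unicode takes managed strings
        // directly (marshalled as wide characters), which makes it the simplest binding to call from C#:
        IntPtr core = IntPtr.Zero, model = IntPtr.Zero;
        NativeMethods.ov_core_create(ref core);
        NativeMethods.ov_core_read_model_unicode(core, "model.xml", "model.bin", ref model);   // placeholder paths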
+ /// Path to a model. + /// Name of a device to load a model to. + /// How many properties args will be passed, + /// each property contains 2 args: key and value. + /// A pointer to the newly created compiled_model. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_from_file", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_from_file( + IntPtr core, + ref sbyte model_path, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model); + + /// + /// Sets properties for a device, acceptable keys can be found in ov_property_key_xxx. + /// + /// A pointer to the ie_core_t instance. + /// Name of a device. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_set_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_set_property( + IntPtr core, + ref sbyte device_name); + + /// + /// Gets properties related to device behaviour. + /// The method extracts information that can be set via the set_property method. + /// + /// A pointer to the ie_core_t instance. + /// Name of a device to get a property value. + /// Property key. + /// A pointer to property value with string format. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_get_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_get_property( + IntPtr core, + ref sbyte device_name, + ref sbyte property_key, + ref IntPtr property_value); + + /// + /// Returns devices available for inference. + /// + /// A pointer to the ie_core_t instance. + /// A pointer to the ov_available_devices_t instance. + /// Core objects go over all registered plugins and ask about available devices. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_get_available_devices", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_get_available_devices( + IntPtr core, + IntPtr devices); + + /// + /// Releases memory occpuied by ov_available_devices_t + /// + /// A pointer to the ov_available_devices_t instance. + [DllImport(dll_extern, EntryPoint = "ov_available_devices_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_available_devices_free(IntPtr devices); + + /// + /// Imports a compiled model from the previously exported one. + /// + /// A pointer to the ov_core_t instance. + /// A pointer to content of the exported model. + /// Number of bytes in the exported network. + /// Name of a device to import a compiled model for. + /// A pointer to the newly created compiled_model. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_import_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_import_model( + IntPtr core, + ref sbyte content, + ulong content_size, + ref sbyte device_name, + ref IntPtr compiled_model); + + + /// + /// Returns device plugins version information. + /// Device name can be complex and identify multiple devices at once like `HETERO:CPU,GPU`; + /// in this case, std::map contains multiple entries, each per device. 
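        // Illustrative sketch, not part of this patch: several bindings in this file, including
        // ov_core_compile_model_from_file above, take strings as `ref sbyte`, i.e. a reference to the
        // first byte of a NUL-terminated buffer. One way to prepare such arguments from managed code:
        sbyte[] model_path = Array.ConvertAll(Encoding.UTF8.GetBytes("model.xml\0"), b => (sbyte)b);   // placeholder path
        sbyte[] device_name = Array.ConvertAll(Encoding.UTF8.GetBytes("CPU\0"), b => (sbyte)b);
        IntPtr compiled_model = IntPtr.Zero;
        NativeMethods.ov_core_compile_model_from_file(core, ref model_path[0], ref device_name[0], 0, ref compiled_model);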
+ /// + /// A pointer to the ov_core_t instance. + /// Device name to identify a plugin. + /// A pointer to versions corresponding to device_name. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_get_versions_by_device_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_get_versions_by_device_name( + IntPtr core, + ref sbyte device_name, + IntPtr versions); + + + /// + /// Releases memory occupied by ov_core_version_list_t. + /// + /// A pointer to the ie_core_versions to free memory. + [DllImport(dll_extern, EntryPoint = "ov_core_versions_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_core_versions_free( + IntPtr versions); + + /// + /// Creates a new remote shared context object on the specified accelerator device + /// using specified plugin-specific low-level device API parameters (device handle, pointer, context, etc.). + /// + /// A pointer to the ov_core_t instance. + /// Device name to identify a plugin. + /// How many property args will be for this remote context creation. + /// A pointer to the newly created remote context. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_create_context", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_create_context( + IntPtr core, + ref sbyte device_name, + ulong context_args_size, + ref IntPtr context); + + + /// + /// Creates a compiled model from a source model within a specified remote context. + /// + /// A pointer to the ov_core_t instance. + /// Model object acquired from ov_core_read_model. + /// A pointer to the newly created remote context. + /// How many args will be for this compiled model. + /// A pointer to the newly created compiled_model. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_with_context", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_with_context( + IntPtr core, + IntPtr model, + IntPtr context, + ulong property_args_size, + ref IntPtr compiled_model); + + /// + /// Gets a pointer to default (plugin-supplied) shared context object for the specified accelerator device. + /// + /// A pointer to the ov_core_t instance. + /// Name of a device to get a default shared context from. + /// A pointer to the referenced remote context. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_with_context", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_with_context( + IntPtr core, + ref sbyte device_name, + ref IntPtr context); + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_dimension.cs b/modules/csharp_api/csharp/native_methods/ov_dimension.cs new file mode 100644 index 000000000..99fa541a1 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_dimension.cs @@ -0,0 +1,27 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +using ov_dimension = OpenVinoSharp.Ov.ov_dimension; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + + + + /// + /// Check this dimension whether is dynamic + /// + /// The dimension pointer that will be checked. + /// Boolean, true is dynamic and false is static. + [DllImport(dll_extern, EntryPoint = "ov_dimension_is_dynamic", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static bool ov_dimension_is_dynamic(ov_dimension dim); + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_infer_request.cs b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs new file mode 100644 index 000000000..00a66f843 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs @@ -0,0 +1,278 @@ +using OpenCvSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Runtime.InteropServices.ComTypes; +using System.Text; +using System.Threading.Tasks; +using static OpenCvSharp.Stitcher; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Set an input/output tensor to infer on by the name of tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Name of the input or output tensor. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_tensor( + IntPtr infer_request, + ref sbyte tensor_name, + IntPtr tensor); + + /// + /// Set an input/output tensor to infer request for the port. + /// + /// A pointer to the ov_infer_request_t. + /// Port of the input or output tensor, which can be got by calling ov_model_t/ov_compiled_model_t interface. + /// + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_tensor_by_port", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_tensor_by_port( + IntPtr infer_request, + IntPtr port, + IntPtr tensor); + /// + /// Set an input/output tensor to infer request for the port. + /// + /// A pointer to the ov_infer_request_t. + /// Const port of the input or output tensor, which can be got by call interface from ov_model_t/ov_compiled_model_t. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_tensor_by_const_port", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_tensor_by_const_port( + IntPtr infer_request, + IntPtr port, + IntPtr tensor); + /// + /// Set an input tensor to infer on by the index of tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Index of the input port. If @p idx is greater than the number of model inputs, an error will return. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_input_tensor_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_input_tensor_by_index( + IntPtr infer_request, + ulong idx, + IntPtr tensor); + /// + /// Set an input tensor for the model with single input to infer on. + /// + /// A pointer to the ov_infer_request_t. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_input_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_input_tensor( + IntPtr infer_request, + IntPtr tensor); + /// + /// Set an output tensor to infer by the index of output tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Index of the output tensor. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_output_tensor_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_output_tensor_by_index( + IntPtr infer_request, + ulong idx, + IntPtr tensor); + /// + /// Set an output tensor to infer models with single output. + /// + /// A pointer to the ov_infer_request_t. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_output_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_output_tensor( + IntPtr infer_request, + IntPtr tensor); + /// + /// Get an input/output tensor by the name of tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Name of the input or output tensor to get. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_tensor( + IntPtr infer_request, + ref sbyte tensor_name, + ref IntPtr tensor); + /// + /// Get an input/output tensor by const port. + /// + /// A pointer to the ov_infer_request_t. + /// Port of the tensor to get. @p port is not found, an error will return. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_tensor_by_const_port", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_tensor_by_const_port( + IntPtr infer_request, + IntPtr port, + ref IntPtr tensor); + /// + /// Get an input/output tensor by port. 
+ /// + /// A pointer to the ov_infer_request_t. + /// Port of the tensor to get. @p port is not found, an error will return. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_tensor_by_port", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_tensor_by_port( + IntPtr infer_request, + IntPtr port, + ref IntPtr tensor); + /// + /// Get an input tensor by the index of input tensor. + /// + /// A pointer to the ov_infer_request_t. + /// ndex of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will return. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_input_tensor_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_input_tensor_by_index( + IntPtr infer_request, + ulong idx, + ref IntPtr tensor); + /// + /// Get an input tensor from the model with only one input tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_input_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_input_tensor( + IntPtr infer_request, + ref IntPtr tensor); + /// + /// Get an output tensor by the index of output tensor. + /// + /// A pointer to the ov_infer_request_t. + /// ndex of the tensor to get. @p idx. If the tensor with the specified @p idx is not found, an error will return. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_output_tensor_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_output_tensor_by_index( + IntPtr infer_request, + ulong idx, + ref IntPtr tensor); + /// + /// Get an output tensor from the model with only one output tensor. + /// + /// A pointer to the ov_infer_request_t. + /// Reference to the tensor. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_output_tensor", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_output_tensor( + IntPtr infer_request, + ref IntPtr tensor); + /// + /// Infer specified input(s) in synchronous mode. + /// + /// A pointer to the ov_infer_request_t. + /// Status code of the operation: OK(0) for success.. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_infer", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_infer( + IntPtr infer_request); + + /// + /// Cancel inference request. + /// + /// A pointer to the ov_infer_request_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_cancel", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_cancel(IntPtr infer_request); + + + /// + /// Start inference of specified input(s) in asynchronous mode. 
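        // Illustrative sketch, not part of this patch: a minimal synchronous inference flow with the
        // bindings above, assuming infer_request and input_tensor already hold valid handles:
        NativeMethods.ov_infer_request_set_input_tensor(infer_request, input_tensor);
        NativeMethods.ov_infer_request_infer(infer_request);
        IntPtr output_tensor = IntPtr.Zero;
        NativeMethods.ov_infer_request_get_output_tensor(infer_request, ref output_tensor);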
+ /// + /// A pointer to the ov_infer_request_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_start_async", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_start_async(IntPtr infer_request); + + /// + /// Wait for the result to become available. + /// + /// A pointer to the ov_infer_request_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_wait", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_wait(IntPtr infer_request); + + /// + /// Waits for the result to become available. Blocks until the specified timeout has elapsed or the result becomes available, + /// whichever comes first. + /// + /// A pointer to the ov_infer_request_t. + /// Maximum duration, in milliseconds, to block for. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_wait_for", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_wait_for(IntPtr infer_request, long timeout); + + /// + /// Set callback function, which will be called when inference is done. + /// + /// A pointer to the ov_infer_request_t. + /// A function to be called. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_set_callback", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_set_callback(IntPtr infer_request, IntPtr callback); + + /// + /// Release the memory allocated by ov_infer_request_t. + /// + /// A pointer to the ov_infer_request_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_infer_request_free(IntPtr infer_request); + + /// + /// Query performance measures per layer to identify the most time consuming operation. + /// + /// A pointer to the ov_infer_request_t. + /// Vector of profiling information for operations in a model. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_profiling_info", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_infer_request_get_profiling_info(IntPtr infer_request, ref Ov.ov_profiling_info_list profiling_infos); + + /// + /// Release the memory allocated by ov_profiling_info_list_t. + /// + /// A pointer to the ov_profiling_info_list_t to free memory. + /// Status code of the operation: OK(0) for success. 
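        // Illustrative sketch, not part of this patch: the asynchronous counterpart starts the request and
        // then blocks on ov_infer_request_wait, or on ov_infer_request_wait_for with a timeout in milliseconds:
        NativeMethods.ov_infer_request_start_async(infer_request);
        ExceptionStatus wait_status = NativeMethods.ov_infer_request_wait_for(infer_request, 1000);   // wait up to 1 s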
+ [DllImport(dll_extern, EntryPoint = "ov_profiling_info_list_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_profiling_info_list_free(IntPtr profiling_infos); + + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_layout.cs b/modules/csharp_api/csharp/native_methods/ov_layout.cs new file mode 100644 index 000000000..f3feeed28 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_layout.cs @@ -0,0 +1,41 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Create a layout object. + /// + /// The description of layout. + /// The layout input pointer. + /// a status code, return OK if successful + [DllImport(dll_extern, EntryPoint = "ov_layout_create", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_layout_create( + ref sbyte layout_desc, + ref IntPtr layout); + + /// + /// Free layout object. + /// + /// The pointer of layout. + [DllImport(dll_extern, EntryPoint = "ov_layout_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_layout_free(IntPtr layout); + + /// + /// Convert layout object to a readable string. + /// + /// layout will be converted. + /// string that describes the layout content. + [DllImport(dll_extern, EntryPoint = "ov_layout_to_string", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static string ov_layout_to_string(IntPtr layout); + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_model.cs b/modules/csharp_api/csharp/native_methods/ov_model.cs new file mode 100644 index 000000000..164bac365 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_model.cs @@ -0,0 +1,304 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + + /// + /// Release the memory allocated by ov_model_t. + /// + /// A pointer to the ov_model_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_model_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_model_free( + IntPtr model); + + /// + /// Get a const single input port of ov_model_t, which only support single input model. + /// + /// A pointer to the ov_model_t. + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_const_input", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_input( + IntPtr model, + ref IntPtr input_port); + + /// + /// Get a const input port of ov_model_t by name. + /// + /// A pointer to the ov_model_t. + /// input tensor name (char *). + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. 
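        // Illustrative sketch, not part of this patch: ov_layout_create consumes a NUL-terminated layout
        // description such as "NCHW", again passed as `ref sbyte`:
        sbyte[] desc = Array.ConvertAll(Encoding.ASCII.GetBytes("NCHW\0"), b => (sbyte)b);
        IntPtr layout = IntPtr.Zero;
        NativeMethods.ov_layout_create(ref desc[0], ref layout);
        Console.WriteLine(NativeMethods.ov_layout_to_string(layout));
        NativeMethods.ov_layout_free(layout);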
+ [DllImport(dll_extern, EntryPoint = "ov_model_const_input_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_input_by_name( + IntPtr model, + ref sbyte tensor_name, + ref IntPtr input_port); + + /// + /// Get a const input port of ov_model_t by port index. + /// + /// A pointer to the ov_model_t. + /// input tensor index. + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_const_input_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_input_by_index( + IntPtr model, + ulong index, + ref IntPtr input_port); + + /// + /// Get single input port of ov_model_t, which only support single input model. + /// + /// A pointer to the ov_model_t. + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_input", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_input( + IntPtr model, + ref IntPtr input_port); + + /// + /// Get an input port of ov_model_t by name. + /// + /// A pointer to the ov_model_t. + /// input tensor name (char *). + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_input_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_input_by_name( + IntPtr model, + ref sbyte tensor_name, + ref IntPtr input_port); + + /// + /// Get an input port of ov_model_t by port index. + /// + /// A pointer to the ov_model_t. + /// input tensor index. + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_input_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_input_by_index( + IntPtr model, + ulong index, + ref IntPtr input_port); + + + /// + /// Get a single const output port of ov_model_t, which only support single output model.. + /// + /// A pointer to the ov_model_t. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_const_output", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_output( + IntPtr model, + ref IntPtr output_port); + + /// + /// Get a const output port of ov_model_t by port index. + /// + /// A pointer to the ov_model_t. + /// input tensor index. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_const_output_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_output_by_index( + IntPtr model, + ulong index, + ref IntPtr output_port); + + /// + /// Get a const output port of ov_model_t by name. + /// + /// A pointer to the ov_model_t. + /// input tensor name (char *). + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_model_const_output_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_const_output_by_name( + IntPtr model, + ref sbyte tensor_name, + ref IntPtr output_port); + + + /// + /// Get an single output port of ov_model_t, which only support single output model. + /// + /// A pointer to the ov_model_t. + /// A pointer to the ov_output_const_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_output", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_output( + IntPtr model, + ref IntPtr output_port); + + /// + /// Get an output port of ov_model_t by port index. + /// + /// A pointer to the ov_model_t. + /// input tensor index. + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_output_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_output_by_index( + IntPtr model, + ulong index, + ref IntPtr output_port); + + /// + /// Get an output port of ov_model_t by name. + /// + /// A pointer to the ov_model_t. + /// output tensor name (char *). + /// A pointer to the ov_output_port_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_output_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_output_by_name( + IntPtr model, + ref sbyte tensor_name, + ref IntPtr output_port); + + /// + /// Get the input size of ov_model_t. + /// + /// A pointer to the ov_model_t. + /// the model's input size. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_inputs_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_inputs_size(IntPtr model, ref ulong input_size); + + /// + /// Get the output size of ov_model_t. + /// + /// A pointer to the ov_model_t. + /// the model's output size. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_outputs_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_outputs_size( + IntPtr model, + ref ulong output_size); + + /// + /// Returns true if any of the ops defined in the model is dynamic shape.. + /// + /// A pointer to the ov_model_t. + /// true if model contains dynamic shapes + [DllImport(dll_extern, EntryPoint = "ov_model_is_dynamic", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static bool ov_model_is_dynamic( + IntPtr model); + + /// + /// Do reshape in model with a list of (name, partial shape). + /// + /// A pointer to the ov_model_t. + /// The list of input tensor names. + /// A PartialShape list. + /// The item count in the list. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_model_reshape", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_reshape( + IntPtr model, + IntPtr[] tensor_names, + ref Ov.ov_partial_shape partial_shapes, + ulong size); + + + /// + /// Do reshape in model with partial shape for a specified name. + /// + /// A pointer to the ov_model_t. + /// The tensor name of input tensor. + /// A PartialShape. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_reshape_input_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_reshape_input_by_name( + IntPtr model, + ref sbyte tensor_name, + Ov.ov_partial_shape partial_shape); + + /// + /// Do reshape in model for one node(port 0). + /// + /// A pointer to the ov_model_t. + /// A PartialShape. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_reshape_single_input", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_reshape_single_input( + IntPtr model, + Ov.ov_partial_shape partial_shape); + + /// + /// Do reshape in model with a list of (port id, partial shape). + /// + /// A pointer to the ov_model_t. + /// The array of port indexes. + /// A PartialShape list. + /// The item count in the list. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_reshape_by_port_indexes", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_reshape_by_port_indexes( + IntPtr model, + ref ulong port_indexes, + ref Ov.ov_partial_shape partial_shapes, + ulong size); + + /// + /// Do reshape in model with a list of (ov_output_port_t, partial shape). + /// + /// A pointer to the ov_model_t. + /// The ov_output_port_t list. + /// A PartialShape list. + /// The item count in the list. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_reshape_by_ports", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_reshape_by_ports( + IntPtr model, + ref IntPtr output_ports, + ref Ov.ov_partial_shape partial_shapes, + ulong size); + + /// + /// Gets the friendly name for a model. + /// + /// A pointer to the ov_model_t. + /// the model's friendly name. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_model_get_friendly_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_model_get_friendly_name( + IntPtr model, + ref IntPtr friendly_name); + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_node.cs b/modules/csharp_api/csharp/native_methods/ov_node.cs new file mode 100644 index 000000000..640905bac --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_node.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Get the shape of port object. + /// + /// A pointer to ov_output_const_port_t. + /// tensor shape. + /// Status code of the operation: OK(0) for success. 
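+ // Editor's note: a hedged sketch (not shipped code) showing how the friendly-name getter
+ // declared above could be consumed; the native call returns a char* that has to be
+ // marshalled to a managed string by the caller.
+ //
+ //   string get_friendly_name(IntPtr model)
+ //   {
+ //       IntPtr name_ptr = IntPtr.Zero;
+ //       ExceptionStatus status = NativeMethods.ov_model_get_friendly_name(model, ref name_ptr);
+ //       return status == 0 ? System.Runtime.InteropServices.Marshal.PtrToStringAnsi(name_ptr) : string.Empty;
+ //   }
+ //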
+ [DllImport(dll_extern, EntryPoint = "ov_const_port_get_shape",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_const_port_get_shape(
+ IntPtr port,
+ IntPtr tensor_shape);
+
+ ///
+ /// Get the shape of port object.
+ ///
+ /// A pointer to ov_output_port_t.
+ /// Tensor shape.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_port_get_shape",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_port_get_shape(
+ IntPtr port,
+ IntPtr tensor_shape);
+ ///
+ /// Get the tensor name of port.
+ ///
+ /// A pointer to the ov_output_const_port_t.
+ /// A pointer to the tensor name.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_port_get_any_name",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_port_get_any_name(
+ IntPtr port,
+ ref IntPtr tensor_name);
+
+ ///
+ /// Get the partial shape of port.
+ ///
+ /// A pointer to the ov_output_const_port_t.
+ /// Partial shape.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_port_get_partial_shape",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_port_get_partial_shape(
+ IntPtr port,
+ ref Ov.ov_partial_shape partial_shape);
+
+ ///
+ /// Get the tensor type of port.
+ ///
+ /// A pointer to the ov_output_const_port_t.
+ /// tensor type.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_port_get_element_type",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_port_get_element_type(
+ IntPtr port,
+ ref uint tensor_type);
+
+ ///
+ /// Free port object.
+ ///
+ /// The pointer to the instance of the ov_output_port_t to free.
+ [DllImport(dll_extern, EntryPoint = "ov_output_port_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_output_port_free(
+ IntPtr port);
+
+ ///
+ /// Free const port object.
+ ///
+ /// The pointer to the instance of the ov_output_const_port_t to free.
+ [DllImport(dll_extern, EntryPoint = "ov_output_const_port_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_output_const_port_free(
+ IntPtr port);
+ }
+}
diff --git a/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs
new file mode 100644
index 000000000..8994a9bf5
--- /dev/null
+++ b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs
@@ -0,0 +1,117 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace OpenVinoSharp
+{
+
+ public partial class NativeMethods
+ {
+ ///
+ /// Initialize a partial shape with static rank and dynamic dimension.
+ ///
+ /// support static rank.
+ /// support dynamic and static dimension.
+ /// The pointer of partial shape
+ ///
+ /// Static rank, but dynamic dimensions on some or all axes.
+ /// Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`
+ /// Static rank, and static dimensions on all axes.
+ /// Examples: `{ 1,2,3,4}` or `{6}` or `{}`
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_partial_shape_create(
+ long rank,
+ ref Ov.ov_dimension dims,
+ IntPtr partial_shape_obj);
+
+ ///
+ /// Initialize a partial shape with dynamic rank and dynamic dimension.
+ ///
+ /// support dynamic and static rank.
+ /// support dynamic and static dimension.
+ /// The pointer of partial shape
+ ///
+ /// Dynamic rank:
+ /// Example: `?`
+ /// Static rank, but dynamic dimensions on some or all axes.
+ /// Examples: `{1,2,?,4}` or `{?,?,?}` or `{1,2,-1,4}`
+ /// Static rank, and static dimensions on all axes.
+ /// Examples: `{ 1,2,3,4}` or `{6}` or `{}`
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create_dynamic",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_partial_shape_create_dynamic(
+ Ov.ov_dimension rank,
+ ref Ov.ov_dimension dims,
+ IntPtr partial_shape_obj);
+
+ ///
+ /// Initialize a partial shape with static rank and static dimension.
+ ///
+ /// support static rank.
+ /// support static dimension.
+ /// The pointer of partial shape
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create_static",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_partial_shape_create_static(
+ long rank,
+ ref long dims,
+ IntPtr partial_shape_obj);
+
+ ///
+ /// Release internal memory allocated in partial shape.
+ ///
+ /// The object's internal memory will be released.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_partial_shape_free(IntPtr partial_shape);
+
+ ///
+ /// Convert partial shape without dynamic data to a static shape.
+ ///
+ /// The partial_shape pointer.
+ /// The shape pointer.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_to_shape",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_partial_shape_to_shape(
+ Ov.ov_partial_shape partial_shape,
+ IntPtr shape);
+
+ ///
+ /// Convert shape to partial shape.
+ ///
+ /// The shape.
+ /// The partial_shape pointer.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_shape_to_partial_shape",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_shape_to_partial_shape(
+ Ov.ov_shape shape,
+ IntPtr partial_shape);
+
+ ///
+ /// Check whether this partial_shape is dynamic.
+ ///
+ /// The partial_shape.
+ /// true if the partial_shape is dynamic, otherwise false.
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_is_dynamic",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static bool ov_partial_shape_is_dynamic(Ov.ov_partial_shape partial_shape);
+
+ ///
+ /// Helper function, convert a partial shape to readable string.
+ ///
+ /// The partial_shape pointer.
+ /// A string that represents partial_shape's content.
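+ // Editor's note: an illustrative, unverified sketch of driving the creators above from
+ // managed code; the buffer handling is an assumption, not part of this file.
+ //
+ //   // build a static {1,3,640,640} partial shape
+ //   long[] dims = { 1, 3, 640, 640 };
+ //   IntPtr partial_shape = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Ov.ov_partial_shape)));
+ //   ExceptionStatus status = NativeMethods.ov_partial_shape_create_static(dims.Length, ref dims[0], partial_shape);
+ //   // ... use the shape (status == 0 expected), then release both allocations:
+ //   NativeMethods.ov_partial_shape_free(partial_shape);
+ //   Marshal.FreeHGlobal(partial_shape);
+ //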
+ [DllImport(dll_extern, EntryPoint = "ov_partial_shape_to_string",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static string ov_partial_shape_to_string(Ov.ov_partial_shape partial_shape);
+ }
+}
diff --git a/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs b/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs
new file mode 100644
index 000000000..b783084b0
--- /dev/null
+++ b/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs
@@ -0,0 +1,462 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace OpenVinoSharp
+{
+ public partial class NativeMethods
+ {
+ ///
+ /// Create a ov_preprocess_prepostprocessor_t instance.
+ ///
+ /// A pointer to the ov_model_t.
+ /// A pointer to the ov_preprocess_prepostprocessor_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_create",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_prepostprocessor_create(
+ IntPtr model,
+ ref IntPtr preprocess);
+
+ ///
+ /// Release the memory allocated by ov_preprocess_prepostprocessor_t.
+ ///
+ /// A pointer to the ov_preprocess_prepostprocessor_t to free memory.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_preprocess_prepostprocessor_free(
+ IntPtr preprocess);
+
+ ///
+ /// Get the input info of ov_preprocess_prepostprocessor_t instance.
+ ///
+ /// A pointer to the ov_preprocess_prepostprocessor_t.
+ /// A pointer to the ov_preprocess_input_info_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_input_info",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_info(
+ IntPtr preprocess,
+ ref IntPtr preprocess_input_info);
+
+ ///
+ /// Get the input info of ov_preprocess_prepostprocessor_t instance by tensor name.
+ ///
+ /// A pointer to the ov_preprocess_prepostprocessor_t.
+ /// The name of input.
+ /// A pointer to the ov_preprocess_input_info_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_input_info_by_name",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_info_by_name(
+ IntPtr preprocess,
+ ref sbyte tensor_name,
+ ref IntPtr preprocess_input_info);
+
+ ///
+ /// Get the input info of ov_preprocess_prepostprocessor_t instance by tensor order.
+ ///
+ /// A pointer to the ov_preprocess_prepostprocessor_t.
+ /// The order of input.
+ /// A pointer to the ov_preprocess_input_info_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_input_info_by_index",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_info_by_index(
+ IntPtr preprocess,
+ ulong tensor_index,
+ ref IntPtr preprocess_input_info);
+
+ ///
+ /// Release the memory allocated by ov_preprocess_input_info_t.
+ ///
+ /// A pointer to the ov_preprocess_input_info_t to free memory.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_info_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_preprocess_input_info_free(
+ IntPtr preprocess_input_info);
+
+ ///
+ /// Get an ov_preprocess_input_tensor_info_t.
+ ///
+ /// A pointer to the ov_preprocess_input_info_t.
+ /// A pointer to ov_preprocess_input_tensor_info_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_info_get_tensor_info",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_input_info_get_tensor_info(
+ IntPtr preprocess_input_info,
+ ref IntPtr preprocess_input_tensor_info);
+
+ ///
+ /// Release the memory allocated by ov_preprocess_input_tensor_info_t.
+ ///
+ /// A pointer to the ov_preprocess_input_tensor_info_t to free memory.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_preprocess_input_tensor_info_free(
+ IntPtr preprocess_input_tensor_info);
+
+ ///
+ /// Get an ov_preprocess_preprocess_steps_t.
+ ///
+ /// A pointer to the ov_preprocess_input_info_t.
+ /// A pointer to ov_preprocess_preprocess_steps_t.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_info_get_preprocess_steps",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_input_info_get_preprocess_steps(
+ IntPtr preprocess_input_info,
+ ref IntPtr preprocess_input_steps);
+
+
+ ///
+ /// Release the memory allocated by ov_preprocess_preprocess_steps_t.
+ ///
+ /// A pointer to the ov_preprocess_preprocess_steps_t to free memory.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_free",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static void ov_preprocess_preprocess_steps_free(
+ IntPtr preprocess_input_process_steps);
+
+
+ ///
+ /// Add resize operation to model's dimensions.
+ ///
+ /// A pointer to ov_preprocess_preprocess_steps_t.
+ /// An ov_preprocess_resizeAlgorithm value.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_resize",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_preprocess_steps_resize(
+ IntPtr preprocess_input_process_steps,
+ int resize_algorithm);
+
+
+ ///
+ /// Add scale preprocess operation. Divide each element of input by specified value.
+ ///
+ /// A pointer to ov_preprocess_preprocess_steps_t.
+ /// Scaling value.
+ /// Status code of the operation: OK(0) for success.
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_scale",
+ CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
+ public extern static ExceptionStatus ov_preprocess_preprocess_steps_scale(
+ IntPtr preprocess_input_process_steps,
+ float value);
+
+
+ ///
+ /// Add mean preprocess operation. Subtract specified value from each element of input.
+ ///
+ /// A pointer to ov_preprocess_preprocess_steps_t.
+ /// Value to subtract from each element.
+ /// Status code of the operation: OK(0) for success.
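+ // Editor's note: a minimal, untested sketch of chaining the step functions above; status
+ // checks are elided, `input_info` is an ov_preprocess_input_info_t pointer obtained
+ // elsewhere, and ResizeAlgorithm is the managed enum defined later in this patch.
+ //
+ //   IntPtr steps = IntPtr.Zero;
+ //   NativeMethods.ov_preprocess_input_info_get_preprocess_steps(input_info, ref steps);
+ //   NativeMethods.ov_preprocess_preprocess_steps_resize(steps, (int)ResizeAlgorithm.RESIZE_LINEAR);
+ //   NativeMethods.ov_preprocess_preprocess_steps_scale(steps, 255.0f);
+ //   NativeMethods.ov_preprocess_preprocess_steps_free(steps);
+ //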
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_mean", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_mean( + IntPtr preprocess_input_process_steps, + float value); + + /// + /// Crop input tensor between begin and end coordinates. + /// + /// A pointer to ov_preprocess_preprocess_steps_t. + /// Pointer to begin indexes for input tensor cropping. + /// Negative values represent counting elements from the end of input tensor + /// The size of begin array. + /// Pointer to end indexes for input tensor cropping. + /// End indexes are exclusive, which means values including end edge are not included in the output slice. + /// Negative values represent counting elements from the end of input tensor + /// The size of end array + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_crop", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_crop( + IntPtr preprocess_input_process_steps, + ref int begin, + int begin_size, + ref int end, + int end_size); + + /// + /// Add 'convert layout' operation to specified layout. + /// + /// A pointer to ov_preprocess_preprocess_steps_t. + /// A point to ov_layout_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_convert_layout", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_convert_layout( + IntPtr preprocess_input_process_steps, + IntPtr layout); + + + /// + /// Reverse channels operation. + /// + /// A pointer to ov_preprocess_preprocess_steps_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_reverse_channels", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_reverse_channels( + IntPtr preprocess_input_process_steps); + + /// + /// Set ov_preprocess_input_tensor_info_t precesion. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// A point to element_type. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_element_type", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_element_type( + IntPtr preprocess_input_tensor_info, + uint element_type); + + /// + /// Set ov_preprocess_input_tensor_info_t color format. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// The enumerate of colorFormat + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_color_format", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format( + IntPtr preprocess_input_tensor_info, + uint color_format); + + + /// + /// Set ov_preprocess_input_tensor_info_t color format with subname. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// The enumerate of colorFormat + /// The size of sub_names. + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_color_format_with_subname", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format_with_subname( + IntPtr preprocess_input_tensor_info, + uint color_format, + ulong sub_names_size); + + + /// + /// Set ov_preprocess_input_tensor_info_t spatial_static_shape. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// The height of input + /// The width of input + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_spatial_static_shape", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_spatial_static_shape( + IntPtr preprocess_input_tensor_info, + ulong input_height, + ulong input_width); + + + /// + /// Set ov_preprocess_input_tensor_info_t memory type. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// Memory type. Refer to ov_remote_context.h to get memory type string info. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_memory_type", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_memory_type( + IntPtr preprocess_input_tensor_info, + ref sbyte mem_type); + + + /// + /// Convert ov_preprocess_preprocess_steps_t element type. + /// + /// A pointer to the ov_preprocess_preprocess_steps_t. + /// preprocess input element type. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_convert_element_type", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_convert_element_type( + IntPtr preprocess_input_process_steps, + uint element_type); + + + /// + /// onvert ov_preprocess_preprocess_steps_t color. + /// + /// A pointer to the ov_preprocess_preprocess_steps_t. + /// The enumerate of colorFormat. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_preprocess_steps_convert_color", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_preprocess_steps_convert_color( + IntPtr preprocess_input_process_steps, + uint color_format); + + + /// + /// Helper function to reuse element type and shape from user's created tensor. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// A point to ov_tensor_t + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_from", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_from( + IntPtr preprocess_input_tensor_info, + IntPtr tensor); + + /// + /// Set ov_preprocess_input_tensor_info_t layout. + /// + /// A pointer to the ov_preprocess_input_tensor_info_t. + /// A point to ov_layout_t + /// Status code of the operation: OK(0) for success. 
+ [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_layout", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_layout( + IntPtr preprocess_input_tensor_info, + IntPtr layout); + + + /// + /// Get the output info of ov_preprocess_output_info_t instance. + /// + /// A pointer to the ov_preprocess_prepostprocessor_t. + /// A pointer to the ov_preprocess_output_info_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_output_info", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_output_info( + IntPtr preprocess, + ref IntPtr preprocess_output_info); + + + /// + /// Get the output info of ov_preprocess_output_info_t instance. + /// + /// A pointer to the ov_preprocess_prepostprocessor_t. + /// The tensor index. + /// A pointer to the ov_preprocess_output_info_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_output_info_by_index", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_output_info_by_index( + IntPtr preprocess, + ulong tensor_index, + ref IntPtr preprocess_output_info); + + + /// + /// Get the output info of ov_preprocess_output_info_t instance. + /// + /// A pointer to the ov_preprocess_prepostprocessor_t. + /// The name of input. + /// A pointer to the ov_preprocess_output_info_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_output_info_by_name", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_output_info_by_name( + IntPtr preprocess, + ref sbyte tensor_name, + ref IntPtr preprocess_output_info); + + + /// + /// Release the memory allocated by ov_preprocess_output_info_t. + /// + /// A pointer to the ov_preprocess_output_info_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_output_info_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_preprocess_output_info_free(IntPtr preprocess_output_info); + + + /// + /// Get a ov_preprocess_input_tensor_info_t. + /// + /// A pointer to the ov_preprocess_output_info_t. + /// A pointer to the ov_preprocess_output_tensor_info_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_output_info_get_tensor_info", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_output_info_get_tensor_info( + IntPtr preprocess_output_info, + ref IntPtr preprocess_output_tensor_info); + + /// + /// Release the memory allocated by ov_preprocess_output_tensor_info_t. + /// + /// A pointer to the ov_preprocess_output_tensor_info_t to free memory. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_output_tensor_info_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_preprocess_output_tensor_info_free( + IntPtr preprocess_output_tensor_info); + + + /// + /// Set ov_preprocess_input_tensor_info_t precesion. 
+ /// + /// A pointer to the ov_preprocess_output_tensor_info_t. + /// A point to element_type + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_output_set_element_type", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_output_set_element_type( + IntPtr preprocess_output_tensor_info, + uint element_type); + + + /// + /// Get current input model information. + /// + /// A pointer to the ov_preprocess_input_info_t. + /// A pointer to the ov_preprocess_input_model_info_t + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_info_get_model_info", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_info_get_model_info( + IntPtr preprocess_input_info, + ref IntPtr preprocess_input_model_info); + + /// + /// Release the memory allocated by ov_preprocess_input_model_info_t. + /// + /// A pointer to the ov_preprocess_input_model_info_t to free memory. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_model_info_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_model_info_free( + IntPtr preprocess_input_model_info); + + /// + /// Set layout for model's input tensor. + /// + /// A pointer to the ov_preprocess_input_model_info_t + /// A point to ov_layout_t + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_model_info_set_layout", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_model_info_set_layout( + IntPtr preprocess_input_model_info, + IntPtr layout); + + + /// + /// Adds pre/post-processing operations to function passed in constructor. + /// + /// A pointer to the ov_preprocess_prepostprocessor_t. + /// A pointer to the ov_model_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_build", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_prepostprocessor_build( + IntPtr preprocess, + ref IntPtr model); + + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_rank.cs b/modules/csharp_api/csharp/native_methods/ov_rank.cs new file mode 100644 index 000000000..5354deafc --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_rank.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +using ov_rank = OpenVinoSharp.Ov.ov_rank; +namespace OpenVinoSharp +{ + + public partial class NativeMethods + { + + /// + /// Check this rank whether is dynamic + /// + /// The rank pointer that will be checked. + /// The return value. 
+ [DllImport(dll_extern, EntryPoint = "ov_rank_is_dynamic", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static bool ov_rank_is_dynamic(ov_rank rank); + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_shape.cs b/modules/csharp_api/csharp/native_methods/ov_shape.cs new file mode 100644 index 000000000..e414a1942 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_shape.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Initialize a fully shape object, allocate space for its dimensions + /// and set its content id dims is not null. + /// + /// The rank value for this object, it should be more than 0(>0) + /// The dimensions data for this shape object, it's size should be equal to rank. + /// The input/output shape object pointer. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_shape_create", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_shape_create( + long rank, + ref long dims, + IntPtr shape); + + /// + /// Free a shape object's internal memory. + /// + /// The input shape object pointer. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_shape_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_shape_free( + IntPtr shape); + } +} diff --git a/modules/csharp_api/csharp/native_methods/ov_tensor.cs b/modules/csharp_api/csharp/native_methods/ov_tensor.cs new file mode 100644 index 000000000..c87450c27 --- /dev/null +++ b/modules/csharp_api/csharp/native_methods/ov_tensor.cs @@ -0,0 +1,121 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + public partial class NativeMethods + { + /// + /// Constructs Tensor using element type and shape. Allocate internal host storage using default allocator. + /// + /// Tensor element type. + /// Tensor shape. + /// Pointer to pre-allocated host memory. + /// A point to ov_tensor_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_create_from_host_ptr", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_create_from_host_ptr( + uint type, + Ov.ov_shape shape, + IntPtr host_ptr, + ref IntPtr tensor); + + /// + /// Constructs Tensor using element type and shape. Allocate internal host storage using default allocator. + /// + /// Tensor element type + /// Tensor shape. + /// A point to ov_tensor_t. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_create", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_create( + uint type, + Ov.ov_shape shape, + ref IntPtr tensor); + /// + /// Set new shape for tensor, deallocate/allocate if new total size is bigger than previous one. + /// + /// A point to ov_tensor_t.. + /// Tensor shape. + /// Status code of the operation: OK(0) for success. 
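+ // Editor's note: an illustrative sketch (assumptions, not shipped code) of wrapping a
+ // managed buffer with ov_tensor_create_from_host_ptr; `shape` is an Ov.ov_shape assumed
+ // to have been filled via ov_shape_create, and ElementType.F32 is assumed to exist in
+ // this module's ElementType enum.
+ //
+ //   float[] data = new float[1 * 3 * 640 * 640];
+ //   GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
+ //   IntPtr tensor = IntPtr.Zero;
+ //   ExceptionStatus status = NativeMethods.ov_tensor_create_from_host_ptr(
+ //       (uint)ElementType.F32, shape, handle.AddrOfPinnedObject(), ref tensor);
+ //   // ... run inference with the tensor, then release:
+ //   NativeMethods.ov_tensor_free(tensor);
+ //   handle.Free();
+ //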
+ [DllImport(dll_extern, EntryPoint = "ov_tensor_set_shape", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_set_shape( + IntPtr tensor, + Ov.ov_shape shape); + + /// + /// Get shape for tensor. + /// + /// A point to ov_tensor_t. + /// Tensor shape. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_get_shape", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_get_shape( + IntPtr tensor, + IntPtr shape); + + /// + /// Get type for tensor. + /// + /// A point to ov_tensor_t. + /// Tensor element type. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_get_element_type", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_get_element_type( + IntPtr tensor, + out uint type); + + /// + /// the total number of elements (a product of all the dims or 1 for scalar). + /// + /// A point to ov_tensor_t. + /// number of elements. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_get_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_get_size( + IntPtr tensor, + ref ulong elements_size); + + /// + /// the size of the current Tensor in bytes. + /// + /// A point to ov_tensor_t + /// the size of the current Tensor in bytes. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_get_byte_size", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_get_byte_size( + IntPtr tensor, + ref ulong byte_size); + + /// + /// Provides an access to the underlaying host memory. + /// + /// A point to ov_tensor_t + /// A point to host memory. + /// Status code of the operation: OK(0) for success. + [DllImport(dll_extern, EntryPoint = "ov_tensor_data", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_tensor_data( + IntPtr tensor, + ref IntPtr data); + + /// + /// Free ov_tensor_t. + /// + /// A point to ov_tensor_t + [DllImport(dll_extern, EntryPoint = "ov_tensor_free", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static void ov_tensor_free(IntPtr tensor); + } +} diff --git a/modules/csharp_api/csharp/ov/ov.cs b/modules/csharp_api/csharp/ov/ov.cs new file mode 100644 index 000000000..65ca74df8 --- /dev/null +++ b/modules/csharp_api/csharp/ov/ov.cs @@ -0,0 +1,33 @@ +using System; +using System.Runtime.InteropServices; + +namespace OpenVinoSharp +{ /// + /// Global functions under ov namespace + /// + public static partial class Ov + { + /// + /// Get version of OpenVINO. 
+ /// + /// Version of OpenVINO + public static Version get_openvino_version() + { + int l = Marshal.SizeOf(typeof(Version)); + IntPtr ptr = Marshal.AllocHGlobal(l); + ExceptionStatus status = NativeMethods.ov_get_openvino_version(ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("ov get_openvino_version() error!"); + return new Version(); + } + var temp = Marshal.PtrToStructure(ptr, typeof(Version)); + Version version = (Version)temp; + string build = string.Copy(version.buildNumber); + string description = string.Copy(version.description); + Version new_version = new Version(build, description); + NativeMethods.ov_version_free(ptr); + return new_version; + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/csharp/ov/ov_struct.cs b/modules/csharp_api/csharp/ov/ov_struct.cs new file mode 100644 index 000000000..39acdb1a3 --- /dev/null +++ b/modules/csharp_api/csharp/ov/ov_struct.cs @@ -0,0 +1,170 @@ +using System; +using System.Runtime.InteropServices; + +namespace OpenVinoSharp +{ + public static partial class Ov + { + /// + /// Reprents a static shape. + /// + public struct ov_shape + { + /// + /// the rank of shape + /// + public long rank; + /// + /// the dims of shape + /// + public IntPtr dims_ptr; + /// + /// Get the dims of shape + /// + /// the dims of shape + public long[] get_dims() + { + long[] dims = new long[rank]; + Marshal.Copy(dims_ptr, dims, 0, (int)rank); + return dims; + } + } + + /// + /// It represents a shape that may be partially or totally dynamic. + /// + /// + /// Dynamic rank. (Informal notation: `?`) + /// Static rank, but dynamic dimensions on some or all axes. + /// (Informal notation examples: `{1,2,?,4}`, `{?,?,?}`) + /// Static rank, and static dimensions on all axes. + /// (Informal notation examples: `{1,2,3,4}`, `{6}`, `{}`) + /// + public struct ov_partial_shape + { + +#if NET7_0_OR_GREATER || NET6_0_OR_GREATER + /// + /// The rank + /// + public ov_dimension rank; + /// + /// The dimension + /// + public IntPtr dims = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Dimension))); + /// + /// Default Constructor + /// + public ov_partial_shape() + { + rank = new ov_dimension(); + dims = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Dimension))); + } +#else + /// + /// The rank + /// + public ov_dimension rank; + /// + /// The dimension + /// + public IntPtr dims; +#endif + } + + + /// + /// This is a structure interface equal to ov::Rank + /// + public struct ov_rank + { + /// + /// The lower inclusive limit for the Rank. + /// + public long min; + /// + /// The upper inclusive limit for the Rank. + /// + public long max; + }; + + /// + /// This is a structure interface equal to ov::Dimension + /// + public struct ov_dimension + { + /// + /// The lower inclusive limit for the dimension. + /// + public long min; + /// + /// The upper inclusive limit for the dimension. + /// + public long max; + }; + + /// + /// Represents basic inference profiling information per operation. + /// + /// + /// If the operation is executed using tiling, the sum time per each tile is indicated as the total execution time. + /// Due to parallel execution, the total execution time for all nodes might be greater than the total inference time. + /// + public struct ProfilingInfo + { + /// + /// Defines the general status of a node. + /// + public enum Status + { + /// + /// A node is not executed. + /// + NOT_RUN, + /// + /// A node is optimized out during graph optimization phase. + /// + OPTIMIZED_OUT, + /// + /// A node is executed. 
+ /// + EXECUTED + }; + /// + /// The absolute time, in microseconds, that the node ran (in total). + /// + public ulong real_time; + /// + /// The net host CPU time that the node ran. + /// + public ulong cpu_time; + /// + /// Name of a node. + /// + public string node_name; + /// + /// Execution type of a unit. + /// + public string exec_type; + /// + /// Node type. + /// + public string node_type; + }; + /// + /// A list of profiling info data + /// + public struct ov_profiling_info_list + { + /// + /// The list of ProfilingInfo + /// + public IntPtr profiling_infos; + /// + /// he list size + /// + public ulong size; + }; + + } +} diff --git a/modules/csharp_api/csharp/preprocess/OvMat.cs b/modules/csharp_api/csharp/preprocess/OvMat.cs new file mode 100644 index 000000000..fb63cfbaa --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/OvMat.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.Drawing; +using System.Drawing.Imaging; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + public class OvMat + { + public byte[] mat_data { get; set; } + public ulong mat_data_size { get; set; } + public int mat_width { get; set; } + public int mat_height { get; set; } + public int mat_channels { get; set; } + public ElementType mat_type { get; set; } = ElementType.U8; + + public OvMat() { } + public OvMat(byte[] mat_data, ulong mat_data_size, int mat_width, int mat_height, int mat_channels, ElementType mat_type) + { + this.mat_data = mat_data; + this.mat_data_size = mat_data_size; + this.mat_width = mat_width; + this.mat_height = mat_height; + this.mat_channels = mat_channels; + this.mat_type = mat_type; + } + public OvMat(string image_path) + { + Bitmap img = new Bitmap(image_path); + Rectangle rect = new Rectangle(0, 0, img.Width, img.Height); + BitmapData bit = img.LockBits(rect, ImageLockMode.ReadWrite, img.PixelFormat); + byte[] byte_data = new byte[bit.Width * bit.Height * 3]; + Marshal.Copy(bit.Scan0, byte_data, 0, byte_data.Length); + this.mat_data = byte_data; + this.mat_data_size = (ulong)(img.Height * img.Width * 3); + this.mat_width = img.Width; + this.mat_height = img.Height; + this.mat_channels = 3; + this.mat_type = ElementType.U8; + img.Dispose(); + + } + public static OvMat read(string image_path) + { + Bitmap img = new Bitmap(image_path); + Rectangle rect = new Rectangle(0, 0, img.Width, img.Height); + BitmapData bit = img.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb); + img.UnlockBits(bit); + byte[] byte_data = new byte[bit.Width * bit.Height * 3]; + Marshal.Copy(bit.Scan0, byte_data, 0, byte_data.Length); + OvMat mat = new OvMat(byte_data, (ulong)(img.Height * img.Width * 3), img.Width, img.Height, 3, ElementType.U8); + + img.Dispose(); + return mat; + //return new OvMat(image_path); + } + } +} diff --git a/modules/csharp_api/csharp/preprocess/common.cs b/modules/csharp_api/csharp/preprocess/common.cs new file mode 100644 index 000000000..67cb895bc --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/common.cs @@ -0,0 +1,73 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// This enum contains enumerations for color format. 
+ /// + public enum ColorFormat : uint + { + /// + /// Undefine color format + /// + UNDEFINE = 0U, + /// + /// Image in NV12 format as single tensor + /// + NV12_SINGLE_PLANE, + /// + /// Image in NV12 format represented as separate tensors for Y and UV planes. + /// + NV12_TWO_PLANES, + /// + /// Image in I420 (YUV) format as single tensor + /// + I420_SINGLE_PLANE, + /// + /// Image in I420 format represented as separate tensors for Y, U and V planes. + /// + I420_THREE_PLANES, + /// + /// Image in RGB interleaved format (3 channels) + /// + RGB, + /// + /// Image in BGR interleaved format (3 channels) + /// + BGR, + /// + /// Image in GRAY format (1 channel) + /// + GRAY, + /// + /// Image in RGBX interleaved format (4 channels) + /// + RGBX, + /// + /// Image in BGRX interleaved format (4 channels) + /// + BGRX + }; + /// + /// This enum contains codes for all preprocess resize algorithm. + /// + public enum ResizeAlgorithm + { + /// + /// linear algorithm + /// + RESIZE_LINEAR, + /// + /// cubic algorithm + /// + RESIZE_CUBIC, + /// + /// nearest algorithm + /// + RESIZE_NEAREST + }; +} diff --git a/modules/csharp_api/csharp/preprocess/input_info.cs b/modules/csharp_api/csharp/preprocess/input_info.cs new file mode 100644 index 000000000..a9bfb96e5 --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/input_info.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Class holding preprocessing information for one input + /// From preprocessing pipeline perspective, each input can be represented as: + /// - User's input parameter info (InputInfo::tensor) + /// - Preprocessing steps applied to user's input (InputInfo::preprocess) + /// - Model's input info, which is a final input's info after preprocessing (InputInfo::model) + /// + public class InputInfo + { + /// + /// [private]InputInfo class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]InputInfo class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through InputInfo pointer. + /// + /// InputInfo pointer. + public InputInfo(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("InputInfo init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~InputInfo() { dispose(); } + /// + /// Release unmanaged resources. + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_input_info_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Get current input tensor information with ability to change specific data + /// + /// Reference to current input tensor structure + public InputTensorInfo tensor() + { + IntPtr input_tensor_ptr = IntPtr.Zero; + ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_tensor_info( + m_ptr, ref input_tensor_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputInfo tensor error : {0}!", status.ToString()); + } + return new InputTensorInfo(input_tensor_ptr); + } + + /// + /// Get current input preprocess information with ability to add more preprocessing steps + /// + /// Reference to current preprocess steps structure. 
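+ // Editor's note: a hedged usage sketch, not part of this class; typical flow assuming
+ // `ppp` is a PrePostProcessor built elsewhere in this module and that PrePostProcessor
+ // exposes an input() accessor and PreProcessSteps a resize() method (both assumptions).
+ //
+ //   InputInfo input = ppp.input();
+ //   input.tensor().set_element_type(new OvType(ElementType.U8));   // OvType wrapper assumed
+ //   input.preprocess().resize(ResizeAlgorithm.RESIZE_LINEAR);
+ //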
+ public PreProcessSteps preprocess() + { + IntPtr preprocess_ptr = IntPtr.Zero; + ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_preprocess_steps( + m_ptr, ref preprocess_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputInfo preprocess error : {0}!", status.ToString()); + } + return new PreProcessSteps(preprocess_ptr); + } + + /// + /// Get current input model information with ability to change original model's input data + /// + /// Reference to current model's input information structure. + public InputModelInfo model() + { + IntPtr model_ptr = IntPtr.Zero; + ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_model_info( + m_ptr, ref model_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputInfo preprocess error : {0}!", status.ToString()); + } + return new InputModelInfo(model_ptr); + } + }; +} diff --git a/modules/csharp_api/csharp/preprocess/input_model_info.cs b/modules/csharp_api/csharp/preprocess/input_model_info.cs new file mode 100644 index 000000000..f6e5e5b2b --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/input_model_info.cs @@ -0,0 +1,77 @@ +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Information about model's input tensor. If all information is already included to loaded model, this info + /// may not be needed. However it can be set to specify additional information about model, like 'layout'. + /// + /// + /// Example of usage of model 'layout': + /// Support model has input parameter with shape {1, 3, 224, 224} and user needs to resize input image to model's + /// dimensions. It can be done like this + /// + public class InputModelInfo + { + /// + /// [private]InputModelInfo class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]InputModelInfo class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through InputModelInfo pointer. + /// + /// InputModelInfo pointer. + public InputModelInfo(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("InputModelInfo init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~InputModelInfo() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_input_model_info_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Set layout for model's input tensor. This version allows chaining for Lvalue objects + /// + /// Layout for model's input tensor. 
+ /// Reference to 'this' to allow chaining with other calls in a builder-like manner + public InputModelInfo set_layout(Layout layout) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_model_info_set_layout( + m_ptr, layout.Ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputModelInfo set_layout error : {0}!", status.ToString()); + } + return this; + } + } +} diff --git a/modules/csharp_api/csharp/preprocess/input_tensor_info.cs b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs new file mode 100644 index 000000000..6223ab14a --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs @@ -0,0 +1,189 @@ +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Information about user's input tensor. By default, it will be initialized to same data (type/shape/etc) as + /// model's input parameter. User application can override particular parameters (like 'element_type') according to + /// application's data and specify appropriate conversions in pre-processing steps + /// + public class InputTensorInfo + { + /// + /// [private]InputTensorInfo class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]InputTensorInfo class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through InputTensorInfo pointer. + /// + /// InputTensorInfo pointer. + public InputTensorInfo(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~InputTensorInfo() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_input_tensor_info_free(m_ptr); + + m_ptr = IntPtr.Zero; + } + /// + /// Set color format for user's input tensor. + /// + /// + /// In general way, some formats support multi-plane input, e.g. NV12 image can be represented as 2 separate tensors + /// (planes): Y plane and UV plane. set_color_format API also allows to set sub_names for such parameters for + /// convenient usage of plane parameters. During build stage, new parameters for each plane will be inserted to the + /// place of original parameter. This means that all parameters located after will shift their positions accordingly + /// (e.g. {param1, param2} will become {param1/Y, param1/UV, param2}) + /// + /// Color format of input image. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_color_format(ColorFormat format) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_color_format( + m_ptr, (uint)format); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_color_format error : {0}!", status.ToString()); + } + return this; + } + /// + /// + /// + /// + /// + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
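+ // Editor's note: an illustrative builder-style chain over this class; a sketch only,
+ // assuming a Layout can be constructed from the string "NHWC" and an OvType from
+ // ElementType (both constructors are assumptions about other files in this patch).
+ //
+ //   input_tensor_info
+ //       .set_element_type(new OvType(ElementType.U8))
+ //       .set_layout(new Layout("NHWC"))
+ //       .set_spatial_static_shape(640, 640);
+ //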
+ public InputTensorInfo set_color_format(ColorFormat format, ulong sub_names_size) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname( + m_ptr, (uint)format, sub_names_size); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_color_format error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Set element type for user's input tensor + /// + /// Element type for user's input tensor. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_element_type(OvType type) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_element_type( + m_ptr, (uint)type.get_type()); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_element_type error : {0}!", status.ToString()); + } + return this; + } + + /// + /// By default, input image shape is inherited from model input shape. Use this method to specify different + /// width and height of user's input image. In case if input image size is not known, use + /// `set_spatial_dynamic_shape` method. + /// + /// Set fixed user's input image height. + /// Set fixed user's input image width. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_spatial_static_shape(ulong input_height, ulong input_width) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_spatial_static_shape( + m_ptr, input_height, input_width); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_shape error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Set memory type runtime information for user's input tensor + /// + /// Memory type. Refer to specific plugin's documentation for exact string format + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_memory_type(string memory_type) + { + sbyte[] c_mem_type = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(memory_type)); + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_memory_type( + m_ptr, ref c_mem_type[0]); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_shape error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Set layout for user's input tensor + /// + /// Layout for user's input tensor. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_layout(Layout layout) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_layout( + m_ptr, layout.Ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_layout error : {0}!", status.ToString()); + } + return this; + } + + + /// + /// Helper function to reuse element type and shape from user's created tensor. Use this only in case if + /// input tensor is already known and available before. Overwrites previously set element type & shape via + /// `set_element_type` and `set_shape`. 
Tensor's memory type is not reused, so if `runtime_tensor` represents remote + /// tensor with particular memory type - you should still specify appropriate memory type manually using + /// `set_memory_type` + /// + /// + /// As for `InputTensorInfo::set_shape`, this method shall not be used together with methods + /// 'set_spatial_dynamic_shape' and 'set_spatial_static_shape', otherwise ov::AssertFailure exception will be thrown + /// + /// User's created tensor. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public InputTensorInfo set_from(Tensor runtime_tensor) + { + ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_from( + m_ptr, runtime_tensor.Ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputTensorInfo set_from error : {0}!", status.ToString()); + } + return this; + } + } +} diff --git a/modules/csharp_api/csharp/preprocess/output_info.cs b/modules/csharp_api/csharp/preprocess/output_info.cs new file mode 100644 index 000000000..31e7e6c85 --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/output_info.cs @@ -0,0 +1,74 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Class holding postprocessing information for one output + /// From postprocessing pipeline perspective, each output can be represented as: + /// - Model's output info, (OutputInfo::model) + /// - Postprocessing steps applied to user's input (OutputInfo::postprocess) + /// - User's desired output parameter information, which is a final one after preprocessing (OutputInfo::tensor) + /// + public class OutputInfo + { + /// + /// [private]OutputInfo class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]OutputInfo class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through OutputInfo pointer. + /// + /// OutputInfo pointer. + public OutputInfo(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("OutputInfo init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~OutputInfo() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_output_info_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Get current output tensor information with ability to change specific data + /// + /// Reference to current output tensor structure + public OutputTensorInfo tensor() + { + IntPtr output_tensor_ptr = IntPtr.Zero; + ExceptionStatus status = NativeMethods.ov_preprocess_output_info_get_tensor_info( + m_ptr, ref output_tensor_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("InputInfo tensor error : {0}!", status.ToString()); + } + return new OutputTensorInfo(output_tensor_ptr); + } + } +} diff --git a/modules/csharp_api/csharp/preprocess/output_tensor_info.cs b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs new file mode 100644 index 000000000..b2f47e8c8 --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Information about user's desired output tensor. 
By default, it will be initialized to same data + /// (type/shape/etc) as model's output parameter. User application can override particular parameters (like + /// 'element_type') according to application's data and specify appropriate conversions in post-processing steps + /// + public class OutputTensorInfo + { + /// + /// [private]OutputTensorInfo class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]OutputTensorInfo class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through OutputTensorInfo pointer. + /// + /// OutputTensorInfo pointer. + public OutputTensorInfo(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("OutputTensorInfo init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~OutputTensorInfo() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_output_tensor_info_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Set element type for user's desired output tensor. + /// + /// Element type for user's output tensor. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public OutputTensorInfo set_element_type(ElementType type) + { + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_output_set_element_type( + m_ptr, (uint)type); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("OutputTensorInfo set_element_type error : {0}!", status.ToString()); + } + return this; + } + } +} diff --git a/modules/csharp_api/csharp/preprocess/prepost_processor.cs b/modules/csharp_api/csharp/preprocess/prepost_processor.cs new file mode 100644 index 000000000..b201c2096 --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/prepost_processor.cs @@ -0,0 +1,178 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + + /// + /// Main class for adding pre- and post- processing steps to existing ov::Model + /// + /// + /// This is a helper class for writing easy pre- and post- processing operations on ov::Model object assuming that + /// any preprocess operation takes one input and produces one output. + /// + /// For advanced preprocessing scenarios, like combining several functions with multiple inputs/outputs into one, + /// client's code can use transformation passes over ov::Model + /// + public class PrePostProcessor + { + /// + /// [private]PrePostProcessor class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]PrePostProcessor class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through Model. + /// + /// model. 
+ public PrePostProcessor(Model model) + { + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_create(model.Ptr, ref m_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor init error : {0}!", status.ToString()); + } + } + /// + /// Default destructor + /// + ~PrePostProcessor() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_prepostprocessor_free(m_ptr); + m_ptr = IntPtr.Zero; + } + + /// + /// Gets input pre-processing data structure. Should be used only if model/function has only one input + /// Using returned structure application's code is able to set user's tensor data (e.g layout), preprocess steps, + /// target model's data + /// + /// Reference to model's input information structure + public InputInfo input() + { + IntPtr input_ptr = IntPtr.Zero; + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info(m_ptr, ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); + } + return new InputInfo(input_ptr); + } + + /// + /// Gets input pre-processing data structure for input identified by it's tensor name + /// + /// Tensor name of specific input. Throws if tensor name is not associated with any input in a model + /// Reference to model's input information structure + public InputInfo input(string tensor_name) + { + IntPtr input_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); + } + return new InputInfo(input_ptr); + } + /// + /// Gets input pre-processing data structure for input identified by it's order in a model + /// + /// Input index of specific input. Throws if input index is out of range for associated function. + /// Reference to model's input information structure + public InputInfo input(ulong tensor_index) + { + IntPtr input_ptr = IntPtr.Zero; + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_index(m_ptr, tensor_index, ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); + } + return new InputInfo(input_ptr); + } + + /// + /// Gets output post-processing data structure. Should be used only if model/function has only one output + /// Using returned structure application's code is able to set model's output data, post-process steps, user's + /// tensor data (e.g layout) + /// + /// Reference to model's output information structure + public OutputInfo output() + { + IntPtr input_ptr = IntPtr.Zero; + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info(m_ptr, ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); + } + return new OutputInfo(input_ptr); + } + + /// + /// Gets output post-processing data structure for output identified by it's tensor name + /// + /// Tensor name of specific output. 
Throws if tensor name is not associated with any input in a model + /// Reference to model's output information structure + public OutputInfo output(string tensor_name) + { + IntPtr input_ptr = IntPtr.Zero; + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); + } + return new OutputInfo(input_ptr); + } + + /// + /// Gets output post-processing data structure for output identified by it's order in a model + /// + /// utput index of specific output. Throws if output index is out of range for associated function + /// Reference to model's output information structure + public OutputInfo output(ulong tensor_index) + { + IntPtr input_ptr = IntPtr.Zero; + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_index(m_ptr, tensor_index, ref input_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); + } + return new OutputInfo(input_ptr); + } + + /// + /// Adds pre/post-processing operations to function passed in constructor + /// + /// Function with added pre/post-processing operations + public Model build() + { + IntPtr model_ptr = IntPtr.Zero; + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_build( + m_ptr, ref model_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PrePostProcessor build error : " + status.ToString()); + } + return new Model(model_ptr); + } + } + + +} diff --git a/modules/csharp_api/csharp/preprocess/preprocess_steps.cs b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs new file mode 100644 index 000000000..6d6d0746b --- /dev/null +++ b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs @@ -0,0 +1,238 @@ +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess +{ + /// + /// Preprocessing steps. Each step typically intends adding of some operation to input parameter + /// User application can specify sequence of preprocessing steps in a builder-like manner + /// + public class PreProcessSteps + { + /// + /// [private]PreProcessSteps class pointer. + /// + public IntPtr m_ptr = IntPtr.Zero; + + /// + /// [public]PreProcessSteps class pointer. + /// + public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + + /// + /// Default construction through PreProcessSteps pointer. + /// + /// PreProcessSteps pointer. + public PreProcessSteps(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps init error : ptr is null!"); + return; + } + this.m_ptr = ptr; + } + /// + /// Default destructor + /// + ~PreProcessSteps() { dispose(); } + /// + /// Release unmanaged resources + /// + public void dispose() + { + if (m_ptr == IntPtr.Zero) + { + return; + } + NativeMethods.ov_preprocess_preprocess_steps_free(m_ptr); + + m_ptr = IntPtr.Zero; + } + + /// + /// Add resize operation to model's dimensions. + /// + /// esize algorithm. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
+ public PreProcessSteps resize(ResizeAlgorithm resize) + { + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_resize( + m_ptr, (int)resize); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps resize error : {0}!", status.ToString()); + } + return this; + } + + + /// + /// Add scale preprocess operation. Divide each element of input by specified value. + /// + /// Scaling value. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public PreProcessSteps scale(float value) + { + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_scale( + m_ptr, value); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps resize error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Add mean preprocess operation. Subtract specified value from each element of input. + /// + /// Value to subtract from each element. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public PreProcessSteps mean(float value) + { + ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_mean( + m_ptr, value); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps mean error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Crop input tensor between begin and end coordinates. Under the hood, inserts `opset8::Slice` operation to + /// execution graph. It is recommended to use to together with `ov::preprocess::InputTensorInfo::set_shape` to set + /// original input shape before cropping + /// + /// Begin indexes for input tensor cropping. Negative values represent counting elements from the end + /// of input tensor + /// End indexes for input tensor cropping. End indexes are exclusive, which means values including end + /// edge are not included in the output slice. Negative values represent counting elements from the end of input tensor + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public PreProcessSteps crop(int[] begin, int[] end) + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_crop( + m_ptr, ref begin[0], begin.Length, ref end[0], end.Length); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps crop error : {0}!", status.ToString()); + } + return this; + } + /// + /// Crop input tensor between begin and end coordinates. Under the hood, inserts `opset8::Slice` operation to + /// execution graph. It is recommended to use to together with `ov::preprocess::InputTensorInfo::set_shape` to set + /// original input shape before cropping + /// + /// Begin indexes for input tensor cropping. Negative values represent counting elements from the end + /// of input tensor + /// End indexes for input tensor cropping. End indexes are exclusive, which means values including end + /// edge are not included in the output slice. Negative values represent counting elements from the end of input + /// tensor + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
+ public PreProcessSteps crop(List begin, List end) + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_crop( + m_ptr, ref begin.ToArray()[0], begin.Count, ref end.ToArray()[0], end.Count); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps crop error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Add 'convert layout' operation to specified layout. + /// + /// New layout after conversion. If not specified - destination layout is obtained from + /// appropriate model input properties. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + /// + /// Adds appropriate 'transpose' operation between user layout and target layout. + /// Current implementation requires source and destination layout to have same number of dimensions + /// + /// + /// when user data has 'NHWC' layout (example is RGB image, [1, 224, 224, 3]) but model expects + /// planar input image ('NCHW', [1, 3, 224, 224]). Preprocessing may look like this: + /// + /// var proc = PrePostProcessor(model); + /// proc.input().tensor().set_layout("NHWC"); // User data is NHWC + /// proc.input().preprocess().convert_layout("NCHW")) // model expects input as NCHW + /// + /// + public PreProcessSteps convert_layout(Layout layout) + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_layout( + m_ptr, layout.Ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_layout error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Reverse channels operation. + /// + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + /// + /// Adds appropriate operation which reverses channels layout. Operation requires layout having 'C' + /// dimension Operation convert_color (RGB-BGR) does reversing of channels also, but only for NHWC layout + /// + /// + /// when user data has 'NCHW' layout (example is [1, 3, 224, 224] RGB order) but model expects + /// BGR planes order. Preprocessing may look like this: + /// + /// var proc = PrePostProcessor(function); + /// proc.input().preprocess().convert_layout({0, 3, 1, 2}); + /// + /// + public PreProcessSteps reverse_channels() + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_reverse_channels( + m_ptr); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps reverse_channels error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Converts color format for user's input tensor. Requires source color format to be specified by + /// nputTensorInfo::set_color_format. + /// + /// Destination color format of input image. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. + public PreProcessSteps convert_color(ColorFormat format) + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_color( + m_ptr, (uint)format); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_element_type error : {0}!", status.ToString()); + } + return this; + } + + /// + /// Add convert element type preprocess operation. + /// + /// Desired type of input. + /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
+ public PreProcessSteps convert_element_type(OvType type) + { + ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_element_type( + m_ptr, (uint)type.get_type()); + if (status != 0) + { + System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_element_type error : {0}!", status.ToString()); + } + return this; + } + } +} diff --git a/modules/csharp_api/demos/yolov8/Program.cs b/modules/csharp_api/demos/yolov8/Program.cs new file mode 100644 index 000000000..48ed06e6f --- /dev/null +++ b/modules/csharp_api/demos/yolov8/Program.cs @@ -0,0 +1,200 @@ +using OpenCvSharp; +using OpenCvSharp.Dnn; +using OpenVinoSharp; +using OpenVinoSharp.model.Yolov8; +using System.Runtime.InteropServices; + +namespace yolov8 +{ + internal class Program + { + static void Main(string[] args) + { + // -------- Get OpenVINO runtime version -------- + + OpenVinoSharp.Version version = Ov.get_openvino_version(); + + Console.WriteLine("---- OpenVINO INFO----"); + Console.WriteLine("Description : {0}", version.description); + Console.WriteLine("Build number: {0}", version.buildNumber); + + if (args.Length < 2) + { + Console.WriteLine("Please enter the complete command parameters: <>model_path> "); + } + string device_name = "AUTO"; + if (args.Length > 3) + { + device_name = args[3]; + Console.WriteLine("Set inference device {0}.", args[3]); + } + else + { + Console.WriteLine("No inference device specified, default device set to AUTO."); + } + string lable = String.Empty; + if (args.Length > 4) + { + lable = args[4]; + } + + if (args[0] == "det" || args[0] == "seg"|| args[0] == "pose"|| args[0] == "cls") + { + yolov8_infer(args[0], args[1], args[2], device_name, lable); + } + else + { + Console.WriteLine("Please specify the model prediction type, such as 'det'、'seg'、'pose'、'cls'"); + } + + } + + static void yolov8_infer(string flg, string model_path, string image_path, string device, string classer_path) + { + // -------- Step 1. Initialize OpenVINO Runtime Core -------- + Core core = new Core(); + // -------- Step 2. Read a model -------- + Console.WriteLine("[INFO] Loading model files: {0}", model_path); + Model model = core.read_model(model_path); + print_model_info(model); + + // -------- Step 3. Loading a model to the device -------- + CompiledModel compiled_model = core.compiled_model(model, device); + + // -------- Step 4. Create an infer request -------- + InferRequest infer_request = compiled_model.create_infer_request(); + // -------- Step 5. Process input images -------- + Console.WriteLine("[INFO] Read image files: {0}", image_path); + Mat image = new Mat(image_path); // Read image by opencvsharp + int max_image_length = image.Cols > image.Rows ? image.Cols : image.Rows; + Mat max_image = Mat.Zeros(new OpenCvSharp.Size(max_image_length, max_image_length), MatType.CV_8UC3); + Rect roi = new Rect(0, 0, image.Cols, image.Rows); + image.CopyTo(new Mat(max_image, roi)); + float[] factors = new float[4]; + factors[0] = factors[1] = (float)(max_image_length / 640.0); + factors[2] = image.Rows; + factors[3] = image.Cols; + + // -------- Step 6. 
Set up input -------- + Tensor input_tensor = infer_request.get_input_tensor(); + Shape input_shape = input_tensor.get_shape(); + Mat input_mat = CvDnn.BlobFromImage(max_image, 1.0 / 255.0, new Size(input_shape[2], input_shape[3]), 0, true, false); + float[] input_data = new float[input_shape[1] * input_shape[2] * input_shape[3]]; + Marshal.Copy(input_mat.Ptr(0), input_data, 0, input_data.Length); + input_tensor.set_data(input_data); + + + // -------- Step 7. Do inference synchronously -------- + + infer_request.infer(); + + // time test + //DateTime start = DateTime.Now; + //for (int i = 0; i < 10; ++i) + //{ + // infer_request.infer(); + //} + //DateTime end = DateTime.Now; + //TimeSpan ts = end.Subtract(start); + //Console.WriteLine("[INFO] infer time: {0}", ts.TotalMilliseconds / 10); + + + // -------- Step 9. Process output -------- + Console.WriteLine(); + if (flg == "det") + { + Tensor output_tensor = infer_request.get_output_tensor(); + int output_length = (int)output_tensor.get_size(); + float[] output_data = output_tensor.get_data(output_length); + + ResultProcess process = new ResultProcess(factors, 80); + Result result = process.process_det_result(output_data); + process.read_class_names(classer_path); + + process.print_result(result); + + if (classer_path != String.Empty) + { + process.read_class_names(classer_path); + Mat result_image = process.draw_det_result(result, image); + Cv2.ImShow("result", result_image); + Cv2.WaitKey(0); + } + + } + else if (flg == "seg") + { + Tensor output_tensor_det = infer_request.get_tensor("output0"); + int output_length_det = (int)output_tensor_det.get_size(); + float[] output_data_det = output_tensor_det.get_data(output_length_det); + + Tensor output_tensor_pro = infer_request.get_tensor("output1"); + int output_length_pro = (int)output_tensor_pro.get_size(); + float[] output_data_pro = output_tensor_pro.get_data(output_length_pro); + + ResultProcess process = new ResultProcess(factors, 80); + Result result = process.process_seg_result(output_data_det, output_data_pro); + + process.print_result(result); + + if (classer_path != String.Empty) + { + process.read_class_names(classer_path); + Mat result_image = process.draw_seg_result(result, image); + Cv2.ImShow("result", result_image); + Cv2.WaitKey(0); + } + + } + else if (flg == "pose") + { + Tensor output_tensor = infer_request.get_output_tensor(); + int output_length = (int)output_tensor.get_size(); + float[] output_data = output_tensor.get_data(output_length); + + ResultProcess process = new ResultProcess(factors, 80); + Result result = process.process_pose_result(output_data); + + + Mat result_image = process.draw_pose_result(result, image, 0.2); + process.print_result(result); + Cv2.ImShow("result", result_image); + Cv2.WaitKey(0); + } + else if (flg == "cls") + { + Tensor output_tensor = infer_request.get_output_tensor(); + int output_length = (int)output_tensor.get_size(); + float[] output_data = output_tensor.get_data(output_length); + + ResultProcess process = new ResultProcess(factors, 80); + KeyValuePair[] result = process.process_cls_result(output_data); + + process.print_result(result); + + } + } + + /// + /// Output relevant information of the model + /// + /// Model class + static void print_model_info(Model model) + { + Console.WriteLine("[INFO] model name: {0}", model.get_friendly_name()); + + Node input_node = model.get_const_input(0); + Console.WriteLine("[INFO] inputs:"); + Console.WriteLine("[INFO] input name: {0}", input_node.get_name()); + Console.WriteLine("[INFO] 
input type: {0}", input_node.get_type().to_string()); + Console.WriteLine("[INFO] input shape: {0}", input_node.get_shape().to_string()); + input_node.dispose(); + Node output_node = model.get_const_output(0); + Console.WriteLine("[INFO] outputs:"); + Console.WriteLine("[INFO] output name: {0}", output_node.get_name()); + Console.WriteLine("[INFO] output type: {0}", output_node.get_type().to_string()); + Console.WriteLine("[INFO] output shape: {0}", output_node.get_shape().to_string()); + output_node.dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/demos/yolov8/Properties/launchSettings.json b/modules/csharp_api/demos/yolov8/Properties/launchSettings.json new file mode 100644 index 000000000..60d9dd268 --- /dev/null +++ b/modules/csharp_api/demos/yolov8/Properties/launchSettings.json @@ -0,0 +1,11 @@ +{ + "profiles": { + "yolov8": { + "commandName": "Project", + "commandLineArgs": "det ./../../../../../model/yolov8/yolov8s.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt" + //"commandLineArgs": "cls ./../../../../../model/yolov8/yolov8s-cls.xml ./../../../../../dataset/image/demo_7.jpg CPU " + //"commandLineArgs": "pose ./../../../../../model/yolov8/yolov8s-pose.xml ./../../../../../dataset/image/demo_9.jpg CPU ", + //"commandLineArgs": "seg ./../../../../../model/yolov8\\yolov8s-seg.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt" + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/demos/yolov8/README.md b/modules/csharp_api/demos/yolov8/README.md new file mode 100644 index 000000000..d6c8d8761 --- /dev/null +++ b/modules/csharp_api/demos/yolov8/README.md @@ -0,0 +1,328 @@ +# OpenVINO™ C# API Deployment Yolov8 Model Example + +[简体中文](README_cn.md) | English + +  OpenVINO™ C# API version 3.0 has undergone significant updates compared to version 2.0, changing from refactoring the C++API to directly reading OpenVino ™ The official C API makes the application more flexible and supports a richer range of functions. OpenVINO™ C# API 3.0 API interface with multiple references to OpenVino ™ C++API implementation, therefore it is closer to the C++API when used, which will be more friendly to friends who are familiar with using the C++API. +  This example demonstrates how to deploy the Yolov8 full series model using the OpenVINO™ C# API 3.0 API. +  This example supports the full range of Yolov8 models, as well as official pre training models and personal training models. +  The following C # APIs will be mainly used in the example: + +| Feature | API | Description | +| :----------------------: | ------------------------------------------------------------ | ------------------------------------------------------------ | +| OpenVINO Runtime Version | Ov.get_openvino_version() | Get Openvino API version. | +| Basic Infer Flow | Core.read_model(), core.compiled_model(), CompiledModel.create_infer_request(), InferRequest.get_input_tensor(), InferRequest.get_output_tensor(), InferRequest.get_tensor() | Common API to do inference: read and compile a model, create an infer request, configure input and output tensors. | +| Synchronous Infer | InferRequest.infer() | Do synchronous inference. | +| Model Operations | Model.get_friendly_name(), Model.get_const_input(), Model.get_const_output() | Get inputs and outputs of a model. | +| Node Operations | Node.get_name(), Node.get_type(), Node.get_shape( | Get node message. 
| +| Tensor Operations | Tensor.get_shape(), Tensor.set_data(), Tensor.get_size(), Tensor.get_data() | Get a tensor shape, size, data and set data. | +| Yolov8 Process | ResultProcess.process_det_result(), ResultProcess.process_seg_result(), ResultProcess.process_pose_result, ResultProcess.process_cls_result(), ResultProcess.read_class_names(), ResultProcess.draw_det_result(), ResultProcess.draw_pose_result(), ResultProcess.draw_seg_result(), ResultProcess.print_result() | Process and draw yolov8 result. | + +  The information listed below has been verified and tested by code running. If there are other successful testing environments, please feel free to supplement: + +| **Options** | **Values** | +| --------------------- | ------------------------------------------------------------ | +| Validated Models | Yolov8-det、Yolov8-cls、Yolov8-pose、Yolov8-seg | +| Model Format | OpenVINO™ toolkit Intermediate Representation (*.xml + *.bin), ONNX (*.onnx) | +| Supported devices | CPU、iGPU、dGPU(Not tested) | +| Operating environment | Window 10 、Window 11; | +| Building environment | Visual Studio 11,.NET 6.0 | + + + +## How It Works + +  When the project runs, the sample program will read the user specified path model, test images, and category files to prepare relevant data for model inference testing; Load the specified model and image into OpenVINO ™ Reasoning the core and performing synchronous reasoning, then loading the obtained reasoning data into a custom Yolov8 data processing class for result processing. +  OpenVINO used in the project ™ The relevant components have been encapsulated in OpenVINO™ C# API, and there is no need to install OpenVino separately ™。 + +## Project Dependency + +  All dependencies in the project can be installed through the **NuGet** package: + +- **OpenVINO™ C# API** + +  You can install it through the NuGet tool that comes with Visual Studio, + +  If the project is compiled through **dotnet**, the corresponding package can be added using the following statement: + +``` +dotnet add package OpenVINO.CSharp.win +``` + +## Model acquisition + +  All the models used in the project were downloaded from the **ultra tics** platform. The following are download examples: + +1. Installing Ultralytics + + ``` + pip install ultralytics + ``` + +2. Export Yolov8 model + + ``` + yolo export model=yolov8s.pt format=onnx #yolov8-det + yolo export model=yolov8s-cls.pt format=onnx #yolov8-cls + yolo export model=yolov8s-pose.pt format=onnx #yolov8-pose + yolo export model=yolov8s-seg.pt format=onnx #yolov8-seg + ``` + +3. Convert to IR format + +   IR format here via OpenVINO ™ The model optimization tool implementation requires the installation of OpenVINO ™ Python version, specific implementation can refer to [Model Preparation OpenVINO ™ Documentation](https://docs.openvino.ai/2023.0/openvino_docs_model_processing_introduction.html) , can also be achieved through the command line: + + ``` + mo -input_model yolov8s.onnx + ``` + +## Building + +  Currently, rapid implementation in the Window environment has been achieved. Please refer to the installation of the environment for reference[Windows Installation OpenVINO™ C# API](./../../docs/en/windows_install.md) + +  The Linux environment is still under development. + +- **Download source code** + + The complete project code and model files have been provided in the code repository, and the project source code can be downloaded through Git. 
+
+  ```
+  git clone https://github.com/guojin-yan/OpenVINO-CSharp-API.git
+  cd OpenVINO-CSharp-API
+  ```
+
+- **Visual Studio compile**
+
+  If compiling with Visual Studio, open the ``CSharp.sln`` solution and install the project dependencies as described in [Project Dependencies](##Project Dependency). The ``openvino2023.0`` folder will then be added to the project.
+
+
+  Finally, build the project by right-clicking the project -> Build.
+
+- **dotnet compile**
+
+  If the project is compiled with **dotnet**, run the following commands in sequence:
+
+  ```
+  cd demos\yolov8
+  dotnet add package OpenVINO.CSharp.win # add the OpenVINO.CSharp.win package
+  dotnet build # build the project
+  ```
+
+  After the project is compiled, an executable file is generated in the ``bin\Debug\net6.0`` directory.
+
+## Run
+
+- **Visual Studio Run**
+
+  To run this project on the Visual Studio platform, you need to modify the ``Properties\launchSettings.json`` file to specify the program's command-line input. The content of the ``launchSettings.json`` file is shown below; to use it, fill in ``commandLineArgs`` with the command-line arguments.
+
+  ```json
+  {
+    "profiles": {
+      "yolov8": {
+        "commandName": "Project",
+        "commandLineArgs": ""
+      }
+    }
+  }
+  ```
+
+  After adding the command-line content, rebuild the project and run it.
+
+  The command-line arguments take the following form:
+
+  ```shell
+  <model type> <model path> <image path> <device> <label path>
+  ```
+
+  When running the example, the model prediction type, model path, and image file path must all be specified. The prediction type is one of 'det', 'seg', 'pose', or 'cls'. The default inference device is 'AUTO'. For 'det' and 'seg' predictions, the label path can also be set; if it is set, the results are drawn on the image, otherwise they are printed to the console.
+
+  - Inference arguments for the Yolov8-det model:
+
+    ```shell
+    det ./../../../../../model/yolov8/yolov8s.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt
+    ```
+
+  - Inference arguments for the Yolov8-cls model:
+
+    ```shell
+    cls ./../../../../../model/yolov8/yolov8s-cls.xml ./../../../../../dataset/image/demo_7.jpg CPU
+    ```
+
+  - Inference arguments for the Yolov8-pose model:
+
+    ```shell
+    pose ./../../../../../model/yolov8/yolov8s-pose.xml ./../../../../../dataset/image/demo_9.jpg CPU
+    ```
+
+  - Inference arguments for the Yolov8-seg model:
+
+    ```shell
+    seg ./../../../../../model/yolov8/yolov8s-seg.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt
+    ```
+
+- **dotnet run**
+
+  If running through dotnet, simply run the following command:
+
+  ```shell
+  dotnet run
+  ```
+
+  The command-line arguments are set as follows:
+
+  - Inference arguments for the Yolov8-det model:
+
+    ```shell
+    det ./../../model/yolov8/yolov8s.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt
+    ```
+
+  - Inference arguments for the Yolov8-cls model:
+
+    ```shell
+    cls ./../../model/yolov8/yolov8s-cls.xml ./../../dataset/image/demo_7.jpg CPU
+    ```
+
+  - Inference arguments for the Yolov8-pose model:
+
+    ```shell
+    pose ./../../model/yolov8/yolov8s-pose.xml ./../../dataset/image/demo_9.jpg CPU
+    ```
+
+  - Inference arguments for the Yolov8-seg model:
+
+    ```shell
+    seg ./../../model/yolov8\\yolov8s-seg.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt
+    ```
+
+### Results Display
+
+The program will output model inference information and inference results:
+
+#### Yolov8-det model inference results
+
+```shell
+PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run det ./../../model/yolov8/yolov8s.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt
+---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 84, 8400] +[INFO] Read image files: ./../../dataset/image/demo_2.jpg + + + Detection result : + +1: 0 0.89 (x:744 y:43 width:388 height:667) +2: 0 0.88 (x:149 y:202 width:954 height:507) +3: 27 0.72 (x:435 y:433 width:98 height:284) +``` + +
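+The detection output above is produced by a flow like the following condensed sketch of the demo's `Program.cs`. The model, image, and label paths are placeholders, the 640x640 input size follows the model shape printed above, and the generic `<float>` parameter on `get_data` is an assumption; error handling and timing are omitted:
+
+```csharp
+using OpenCvSharp;
+using OpenCvSharp.Dnn;
+using OpenVinoSharp;
+using OpenVinoSharp.model.Yolov8;
+using System.Runtime.InteropServices;
+
+// Read and compile the model, then create an infer request.
+Core core = new Core();
+Model model = core.read_model("./../../model/yolov8/yolov8s.xml");
+CompiledModel compiled_model = core.compiled_model(model, "CPU");
+InferRequest infer_request = compiled_model.create_infer_request();
+
+// Letterbox the image onto a square canvas and convert it to a 640x640 blob.
+Mat image = new Mat("./../../dataset/image/demo_2.jpg");
+int max_len = Math.Max(image.Cols, image.Rows);
+Mat max_image = Mat.Zeros(new OpenCvSharp.Size(max_len, max_len), MatType.CV_8UC3);
+image.CopyTo(new Mat(max_image, new Rect(0, 0, image.Cols, image.Rows)));
+Mat blob = CvDnn.BlobFromImage(max_image, 1.0 / 255.0, new OpenCvSharp.Size(640, 640), 0, true, false);
+
+// Fill the input tensor and run synchronous inference.
+Tensor input_tensor = infer_request.get_input_tensor();
+float[] input_data = new float[3 * 640 * 640];
+Marshal.Copy(blob.Ptr(0), input_data, 0, input_data.Length);
+input_tensor.set_data(input_data);
+infer_request.infer();
+
+// Read the raw output and decode it with the demo's result-processing helper.
+Tensor output_tensor = infer_request.get_output_tensor();
+float[] output_data = output_tensor.get_data<float>((int)output_tensor.get_size());
+float[] factors = { max_len / 640.0f, max_len / 640.0f, image.Rows, image.Cols };
+ResultProcess process = new ResultProcess(factors, 80);
+Result result = process.process_det_result(output_data);
+process.read_class_names("./../../dataset/lable/COCO_lable.txt");
+Cv2.ImShow("result", process.draw_det_result(result, image));
+Cv2.WaitKey(0);
+```
+
+The seg, pose, and cls branches differ only in how the output tensors are decoded, as the demo's `yolov8_infer` method shows.
+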
+ +#### Yolov8-pose model inference results + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run pose ./../../model/yolov8/yolov8s-pose.xml ./../../dataset/image/demo_9.jpg CPU +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s-pose.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 56, 8400] +[INFO] Read image files: ./../../dataset/image/demo_9.jpg + + + Classification result : + +1: 1 0.94 (x:104 y:22 width:151 height:365) Nose: (188 ,60 ,0.92) Left Eye: (192 ,52 ,0.83) Right Eye: (179 ,54 ,0.89) Left Ear: (197 ,52 ,0.48) Right Ear: (166 ,56 ,0.75) Left Shoulder: (212 ,91 ,0.92) Right Shoulder: (151 ,94 ,0.94) Left Elbow: (230 ,145 ,0.89) Right Elbow: (138 ,143 ,0.92) Left Wrist: (244 ,199 ,0.88) Right Wrist: (118 ,187 ,0.91) Left Hip: (202 ,191 ,0.97) Right Hip: (169 ,193 ,0.97) Left Knee: (183 ,271 ,0.96) Right Knee: (183 ,275 ,0.96) Left Ankle: (174 ,358 ,0.87) Right Ankle: (197 ,354 ,0.88) +``` + +
+ +#### Yolov8-seg model inference results + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run seg ./../../model/yolov8\\yolov8s-seg.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8\\yolov8s-seg.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 116, 8400] +[INFO] Read image files: ./../../dataset/image/demo_2.jpg + + + Segmentation result : + +1: 0 0.90 (x:745 y:41 width:402 height:671) +2: 0 0.86 (x:118 y:196 width:1011 height:515) +3: 27 0.70 (x:434 y:436 width:90 height:280) +``` + +
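+Unlike the detection model, the segmentation model exposes two outputs that are fetched by tensor name and decoded together. The fragment below is a minimal sketch that reuses the `infer_request`, `factors`, and `image` variables from the detection sketch earlier; the label path is again a placeholder:
+
+```csharp
+// "output0" holds the detection head, "output1" the mask prototypes.
+Tensor det_tensor = infer_request.get_tensor("output0");
+float[] det_data = det_tensor.get_data<float>((int)det_tensor.get_size());
+Tensor proto_tensor = infer_request.get_tensor("output1");
+float[] proto_data = proto_tensor.get_data<float>((int)proto_tensor.get_size());
+
+// Combine both outputs into box + mask results and draw them.
+ResultProcess process = new ResultProcess(factors, 80);
+Result result = process.process_seg_result(det_data, proto_data);
+process.read_class_names("./../../dataset/lable/COCO_lable.txt");
+Cv2.ImShow("result", process.draw_seg_result(result, image));
+Cv2.WaitKey(0);
+```
+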
+ + + +#### Yolov8-cls model inference results + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run cls ./../../model/yolov8/yolov8s-cls.xml ./../../dataset/image/demo_7.jpg CPU +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s-cls.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 224, 224] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 1000] +[INFO] Read image files: ./../../dataset/image/demo_7.jpg + + + Classification Top 10 result : + +classid probability +------- ----------- +294 0.992172 +269 0.002861 +296 0.002111 +295 0.000714 +270 0.000546 +276 0.000432 +106 0.000159 +362 0.000147 +260 0.000078 +272 0.000070 +``` + diff --git a/modules/csharp_api/demos/yolov8/README_cn.md b/modules/csharp_api/demos/yolov8/README_cn.md new file mode 100644 index 000000000..2c29bc2f5 --- /dev/null +++ b/modules/csharp_api/demos/yolov8/README_cn.md @@ -0,0 +1,332 @@ +![OpenVinoSharp](https://socialify.git.ci/guojin-yan/OpenVinoSharp/image?description=1&descriptionEditable=💞%20OpenVINO%20wrapper%20for%20.NET💞%20&forks=1&issues=1&logo=https%3A%2F%2Fs2.loli.net%2F2023%2F01%2F26%2FylE1K5JPogMqGSW.png&name=1&owner=1&pattern=Circuit%20Board&pulls=1&stargazers=1&theme=Light) + +简体中文| [English](README.md) + +# OpenVinoSharp部署Yolov8模型实例 + +  OpenVinoSharp 3.0 版本较2.0版本做了较大程度上的更新,由原来的重构 C++ API 改为直接读取 OpenVINO™ 官方 C API,使得应用更加灵活,所支持的功能更加丰富。OpenVinoSharp 3.0 API 接口多参考 OpenVINO™ C++ API 实现,因此在使用时更加接近C++ API,这对熟悉使用C++ API的朋友会更加友好。 + +  此示例演示了如何使用OpenVinoSharp 3.0 版本 API 部署Yolov8 全系列模型。 + +  该示例支持Yolov8全系列模型,并且支持官方预训练模型以及个人训练模型。 + +  示例中主要会使用以下C# API: + +| Feature | API | Description | +| :----------------------: | ------------------------------------------------------------ | ------------------------------------------------------------ | +| OpenVINO Runtime Version | Ov.get_openvino_version() | Get Openvino API version. | +| Basic Infer Flow | Core.read_model(), core.compiled_model(), CompiledModel.create_infer_request(), InferRequest.get_input_tensor(), InferRequest.get_output_tensor(), InferRequest.get_tensor() | Common API to do inference: read and compile a model, create an infer request, configure input and output tensors. | +| Synchronous Infer | InferRequest.infer() | Do synchronous inference. | +| Model Operations | Model.get_friendly_name(), Model.get_const_input(), Model.get_const_output() | Get inputs and outputs of a model. | +| Node Operations | Node.get_name(), Node.get_type(), Node.get_shape( | Get node message. | +| Tensor Operations | Tensor.get_shape(), Tensor.set_data(), Tensor.get_size(), Tensor.get_data() | Get a tensor shape, size, data and set data. | +| Yolov8 Process | ResultProcess.process_det_result(), ResultProcess.process_seg_result(), ResultProcess.process_pose_result, ResultProcess.process_cls_result(), ResultProcess.read_class_names(), ResultProcess.draw_det_result(), ResultProcess.draw_pose_result(), ResultProcess.draw_seg_result(), ResultProcess.print_result() | Process and draw yolov8 result. 
| + +下方所列出信息已经经过代码运行验证测试,如有其他环境测试成功欢迎大家进行补充: + +| 选项 | 值 | +| -------- | ------------------------------------------------------- | +| 支持模型 | Yolov8-det、Yolov8-cls、Yolov8-pose、Yolov8-seg | +| 模型格式 | OpenVINO™ 工具包中间表示(\*.xml,\*.bin),ONNX (\*.onnx) | +| 支持设备 | CPU、iGPU、dGPU(未测试) | +| 运行环境 | Window 10 、Window 11; | +| 编译环境 | Visual Studio 11,.NET 6.0 | + +## 工作原理 + +  项目运行时,示例程序会读取用户指定路径模型、测试图片以及类别文件,准备模型推理测试的相关数据;将指定模型和图像加载到OpenVINO™ 推理核心并进行同步推理,然后将获取的推理数据加载到自定义的Yolov8数据处理类中进行结果处理。 + +  项目中使用的OpenVINO™相关组件已经封装到OpenVinoSharp中,无需安装在单独安装OpenVINO™。 + +## 项目依赖 + +  项目中所有依赖项均可以通过NuGet 包安装: + +- **OpenVinoSharp** + +  可以通过Visual Studio 自带的 NuGet 工具进行安装 + +  如果项目是通过**dotnet**编译,可以通过下面语句添加对应的包: + +``` +dotnet add package OpenVINO.CSharp.win +``` + +## 模型获取 + +  项目中所使用的模型全部由**ultralytics**平台下载,下面是下载示例: + +1. 安装ultralytics + + ``` + pip install ultralytics + ``` + +2. 导出 Yolov8模型 + + ``` + yolo export model=yolov8s.pt format=onnx #yolov8-det + yolo export model=yolov8s-cls.pt format=onnx #yolov8-cls + yolo export model=yolov8s-pose.pt format=onnx #yolov8-pose + yolo export model=yolov8s-seg.pt format=onnx #yolov8-seg + ``` + +3. 转为IR格式 + + IR格式此处通过OpenVINO™的模型优化工具实现,需要安装OpenVINO™ Python 版本,具体实现可以参考[Model Preparation — OpenVINO™ documentation](https://docs.openvino.ai/2023.0/openvino_docs_model_processing_introduction.html),也可以通过命令行实现: + + ``` + mo -input_model yolov8s.onnx + ``` + +## 快速构建 + +  目前已经实现Window环境下的快速实现,环境安装请参考[Windows 安装 OpenVINOSharp](./../../docs/cn/windows_install.md)。 + +  Linux环境还在开发中。 + +- **下载源码** + + 代码仓中已经提供了完整的项目代码和模型文件,通过Git下载项目源码。 + + ``` + git clone https://github.com/guojin-yan/OpenVINOSharp.git + cd OpenVINOSharp + ``` + +- **Visual Studio 编译** + +  如果使用Visual Studio 编译,可以通过解决方案打开``OpenVinoSharp.sln`` 解决方案,并按照[项目依赖](##项目依赖)中的方式安装项目依赖,然后项目中会增加``openvino2023.0``文件夹。 + +
+ +  最后项目构建和编译,只需要通过右击项目->生成即可。 + +- **dotnet编译** + +   如果项目通过dotnet编译,依次运行以下命令: + +``` +cd demos\yolov8 +dotnet add package OpenVinoSharp.win # 添加OpenVinoSharp包 +dotnet build # 编译项目 +``` + +  项目编译后,会在``\bin\Debug\net6.0``目录下生成可执行文件。 + +## 运行 + +- **Visual Studio 运行** + + 在 Visual Studio 平台运行该项目需要修改``Properties\launchSettings.json``文件指定程序命令行输入,``launchSettings.json`` 文件内容如下所示,在使用时需要添加命令行\即可即可。 + + ```json + { + "profiles": { + "yolov8": { + "commandName": "Project", + "commandLineArgs": "" + } + } + } + ``` + + 添加命令行内容后,重新生成项目并运行即可。 + + \参数主要内容如下: + + ```shell + + ``` + + 运行示例时,需要同时指定模型预测类型、模型路径、图片文件路径参数,预测类型输入包括: 'det'、'seg'、'pose'、'cls'四种类型;默认推理设备设置为'AUTO',对于'det'、'seg'预测,可以设置参数,如果设置该参数,会将结果绘制到图片上,如果未设置,会通过控制台打印出来 + + - Yolov8-det 模型推理参数为: + + ```shell + det ./../../../../../model/yolov8/yolov8s.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt + ``` + + - Yolov8-cls 模型推理参数为: + + ```shell + cls ./../../../../../model/yolov8/yolov8s-cls.xml ./../../../../../dataset/image/demo_7.jpg CPU + ``` + + - Yolov8-pose 模型推理参数为: + + ```shell + pose ./../../../../../model/yolov8/yolov8s-pose.xml ./../../../../../dataset/image/demo_9.jpg CPU + ``` + + - Yolov8-seg 模型推理参数为: + + ```shell + seg ./../../../../../model/yolov8/yolov8s-seg.xml ./../../../../../dataset/image/demo_2.jpg CPU ./../../../../../dataset/lable/COCO_lable.txt + ``` + +- **dotnet运行** + + 如果通过dotnet运行,只需要运行以下命令即可 + + ```shell + dotnet run + ``` + + \参数设置如下: + + - Yolov8-det 模型推理参数为: + + ```shell + det ./../../model/yolov8/yolov8s.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt + ``` + + - Yolov8-cls 模型推理参数为: + + ```shell + cls ./../../model/yolov8/yolov8s-cls.xml ./../../dataset/image/demo_7.jpg CPU + ``` + + - Yolov8-pose 模型推理参数为: + + ```shell + pose ./../../model/yolov8/yolov8s-pose.xml ./../../dataset/image/demo_9.jpg CPU + ``` + + - Yolov8-seg 模型推理参数为: + + ```shell + seg ./../../model/yolov8\\yolov8s-seg.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt + ``` + +### 结果展示 + +程序运行会输出模型推理信息和推理结果: + +#### Yolov8-det 模型推理结果 + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run det ./../../model/yolov8/yolov8s.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 84, 8400] +[INFO] Read image files: ./../../dataset/image/demo_2.jpg + + + Detection result : + +1: 0 0.89 (x:744 y:43 width:388 height:667) +2: 0 0.88 (x:149 y:202 width:954 height:507) +3: 27 0.72 (x:435 y:433 width:98 height:284) +``` + +
+ +#### Yolov8-pose 模型推理结果 + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run pose ./../../model/yolov8/yolov8s-pose.xml ./../../dataset/image/demo_9.jpg CPU +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s-pose.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 56, 8400] +[INFO] Read image files: ./../../dataset/image/demo_9.jpg + + + Classification result : + +1: 1 0.94 (x:104 y:22 width:151 height:365) Nose: (188 ,60 ,0.92) Left Eye: (192 ,52 ,0.83) Right Eye: (179 ,54 ,0.89) Left Ear: (197 ,52 ,0.48) Right Ear: (166 ,56 ,0.75) Left Shoulder: (212 ,91 ,0.92) Right Shoulder: (151 ,94 ,0.94) Left Elbow: (230 ,145 ,0.89) Right Elbow: (138 ,143 ,0.92) Left Wrist: (244 ,199 ,0.88) Right Wrist: (118 ,187 ,0.91) Left Hip: (202 ,191 ,0.97) Right Hip: (169 ,193 ,0.97) Left Knee: (183 ,271 ,0.96) Right Knee: (183 ,275 ,0.96) Left Ankle: (174 ,358 ,0.87) Right Ankle: (197 ,354 ,0.88) +``` + +
+ +#### Yolov8-seg 模型推理结果 + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run seg ./../../model/yolov8\\yolov8s-seg.xml ./../../dataset/image/demo_2.jpg CPU ./../../dataset/lable/COCO_lable.txt +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8\\yolov8s-seg.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 640, 640] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 116, 8400] +[INFO] Read image files: ./../../dataset/image/demo_2.jpg + + + Segmentation result : + +1: 0 0.90 (x:745 y:41 width:402 height:671) +2: 0 0.86 (x:118 y:196 width:1011 height:515) +3: 27 0.70 (x:434 y:436 width:90 height:280) +``` + +
+ + + +#### Yolov8-cls 模型推理结果 + +```shell +PS E:\Git_space\OpenVinoSharp\demos\yolov8> dotnet run cls ./../../model/yolov8/yolov8s-cls.xml ./../../dataset/image/demo_7.jpg CPU +---- OpenVINO INFO---- +Description : OpenVINO Runtime +Build number: 2023.0.1-11005-fa1c41994f3-releases/2023/0 +Set inference device CPU. +[INFO] Loading model files: ./../../model/yolov8/yolov8s-cls.xml +[INFO] model name: torch_jit +[INFO] inputs: +[INFO] input name: images +[INFO] input type: f32 +[INFO] input shape: Shape : [1, 3, 224, 224] +[INFO] outputs: +[INFO] output name: output0 +[INFO] output type: f32 +[INFO] output shape: Shape : [1, 1000] +[INFO] Read image files: ./../../dataset/image/demo_7.jpg + + + Classification Top 10 result : + +classid probability +------- ----------- +294 0.992172 +269 0.002861 +296 0.002111 +295 0.000714 +270 0.000546 +276 0.000432 +106 0.000159 +362 0.000147 +260 0.000078 +272 0.000070 +``` + diff --git a/modules/csharp_api/demos/yolov8/yolov8.csproj b/modules/csharp_api/demos/yolov8/yolov8.csproj new file mode 100644 index 000000000..5bb58892f --- /dev/null +++ b/modules/csharp_api/demos/yolov8/yolov8.csproj @@ -0,0 +1,15 @@ + + + + Exe + net6.0 + enable + enable + + + + + + + + diff --git a/modules/csharp_api/docs/cn/linux_install.md b/modules/csharp_api/docs/cn/linux_install.md new file mode 100644 index 000000000..53a8b52c5 --- /dev/null +++ b/modules/csharp_api/docs/cn/linux_install.md @@ -0,0 +1,134 @@ +# OpenVINO C# API 在Linux 平台使用 + +  由于目前 OpenVINO C# API 还在开发阶段,未生成相应的 NuGet Package, 因此此处基于 Ubuntu 20.04 系统,提供了相应的使用案例,方便大家在Linux系统上使用 OpenVINO C# API。 + +## 一、配置 .NET 环境 + +  .NET 是一个免费的跨平台开源开发人员平台 ,用于构建多种应用程序。下面将演示 AIxBoard 如何在 Ubuntu 20.04 上安装 .NET环境,支持 .NET Core 2.0-3.1 系列 以及.NET 5-8 系列 ,如果你的 AIxBoard 使用的是其他Linux系统,你可以参考[在 Linux 发行版上安装 .NET - .NET | Microsoft Learn](https://learn.microsoft.com/zh-cn/dotnet/core/install/linux)。 + +### 1. 添加 Microsoft 包存储库 + +  使用 APT 进行安装可通过几个命令来完成。 安装 .NET 之前,请运行以下命令,将 Microsoft 包签名密钥添加到受信任密钥列表,并添加包存储库。 + +  打开终端并运行以下命令: + +```bash +wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb +sudo dpkg -i packages-microsoft-prod.deb +rm packages-microsoft-prod.deb +``` + +  下图为输入上面命令后控制台的输出: + +
+ +### 2. 安装 SDK + +  .NET SDK 使你可以通过 .NET 开发应用。 如果安装 .NET SDK,则无需安装相应的运行时。 若要安装 .NET SDK,请运行以下命令: + +```bash +sudo apt-get update +sudo apt-get install -y dotnet-sdk-3.1 +``` + +  下图为安装后控制台的输出: + +
+ + +### 3. 测试安装 + +  通过命令行可以检查 SDK 版本以及Runtime时版本。 + +``` +dotnet --list-sdks +dotnet --list-runtimes +``` + +  下图为输入测试命令后控制台的输出: + +
+ +  以上就是.NET环境的配置步骤,如果你的环境与本文不匹配,可以通过[.NET 文档 | Microsoft Learn](https://learn.microsoft.com/zh-cn/dotnet/) 获取更多安装步骤。 + +## 二、安装 OpenVINO C# API + +  OpenVINO™ 有两种安装方式: OpenVINO Runtime和OpenVINO Development Tools。OpenVINO Runtime包含用于在处理器设备上运行模型部署推理的核心库。OpenVINO Development Tools是一组用于处理OpenVINO和OpenVINO模型的工具,包括模型优化器、OpenVINO Runtime、模型下载器等。在此处我们只需要安装OpenVINO Runtime即可。 + +### 1. 下载 OpenVINO Runtime + +  访问[Download the Intel Distribution of OpenVINO Toolkit](https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/download.html?ENVIRONMENT=DEV_TOOLS&OP_SYSTEM=WINDOWS&VERSION=v_2023_0_1&DISTRIBUTION=PIP)页面,按照下面流程选择相应的安装选项,在下载页面,由于我们的设备使用的是**Ubuntu20.04**,因此下载时按照指定的编译版本下载即可。 + +
+
+### 2. 解压安装包
+
+  我们所下载的 OpenVINO Runtime 本质是一个 C++ 依赖包,因此我们把它放到系统目录下,这样在编译时会根据设置的系统变量获取依赖项。首先在系统文件夹下创建一个文件夹:
+
+```bash
+sudo mkdir -p /opt/intel
+```
+
+  然后解压缩我们下载的安装文件,并将其移动到指定文件夹下:
+
+```bash
+tar -xvzf l_openvino_toolkit_ubuntu20_2023.0.1.11005.fa1c41994f3_x86_64.tgz
+sudo mv l_openvino_toolkit_ubuntu20_2023.0.1.11005.fa1c41994f3_x86_64 /opt/intel/openvino_2022.3.0
+```
+
+### 3. 安装依赖
+
+  接下来我们需要安装 OpenVINO Runtime 所需要的依赖项,通过命令行输入以下命令即可:
+
+```bash
+cd /opt/intel/openvino_2022.3.0/
+sudo -E ./install_dependencies/install_openvino_dependencies.sh
+```
+
+
+
+### 4. 配置环境变量
+
+  安装完成后,我们需要配置环境变量,以保证在调用时系统可以获取对应的文件,通过命令行输入以下命令即可:
+
+```bash
+source /opt/intel/openvino_2022.3.0/setupvars.sh
+```
+
+  以上就是 OpenVINO Runtime 环境的配置步骤,如果你的环境与本文不匹配,可以通过[Install OpenVINO™ Runtime — OpenVINO™ documentation — Version(2023.0)](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_install_runtime.html)获取更多安装步骤。
+
+### 5. 添加 OpenVINO™ C# API 依赖
+
+  由于 OpenVINO™ C# API 当前正处于开发阶段,还未创建 Linux 版本的 NuGet Package,因此需要通过下载项目源码、以项目引用的方式使用。
+
+- **下载源码**
+
+ 通过 Git 下载项目源码,新建一个 Terminal,并输入以下命令克隆远程仓库,将该项目放置在项目同级目录下。
+
+ ```
+ git clone https://github.com/guojin-yan/OpenVINO-CSharp-API.git
+ cd OpenVINO-CSharp-API
+ ```
+
+- **修改 OpenVINO™ 依赖**
+
+ 由于项目源码的 OpenVINO™ 依赖与本文设置不同,因此需要修改 OpenVINO™ 依赖项的路径,主要通过修改 ``OpenVINO-CSharp-API/src/CSharpAPI/native_methods/ov_base.cs`` 文件即可,修改内容如下:
+
+ ```
+ private const string dll_extern = "./openvino2023.0/openvino_c.dll";
+ ---修改为--->
+ private const string dll_extern = "libopenvino_c.so";
+ ```
+
+- **添加项目依赖**
+
+ 在 Terminal 输入以下命令,即可将 OpenVINO™ C# API 添加到 AlxBoard_deploy_yolov8 项目引用中。
+
+ ```shell
+ dotnet add reference ./../OpenVINO-CSharp-API/src/CSharpAPI/CSharpAPI.csproj
+ ```
+
+
+
+
diff --git a/modules/csharp_api/docs/cn/windows_install.md b/modules/csharp_api/docs/cn/windows_install.md
new file mode 100644
index 000000000..156e4c49e
--- /dev/null
+++ b/modules/csharp_api/docs/cn/windows_install.md
@@ -0,0 +1,32 @@
+# OpenVINO™ C# API 在 Windows 平台使用
+
+  OpenVINO™ C# API 主要基于 OpenVINO™ 和 C# 开发,支持 Windows 10/11 版本,目前已经在 x64 架构下完成测试。
+
+## C# 环境配置
+
+  C# 是一种新式编程语言,不仅面向对象,还类型安全。开发人员利用 C# 能够生成在 .NET 中运行的多种安全可靠的应用程序。C# 环境安装可以参考下面的文章进行配置。
+
+- [.NET 安装指南 - .NET | Microsoft Learn](https://learn.microsoft.com/zh-cn/dotnet/core/install/windows?tabs=net70)
+
+- [.NET Framework 安装指南 - .NET Framework | Microsoft Learn](https://learn.microsoft.com/zh-cn/dotnet/framework/install/)
+
+## OpenVINO™ C# API 安装
+
+  由于在 Windows 环境下开发 C# 语言比较方便,因此目前开发了 OpenVINO™ C# API 的 NuGet Package,在使用时直接通过 C# 的 NuGet Package 进行安装即可。在打包 NuGet Package 时,同时将 OpenVINO™ 官方编译的动态链接库文件一并打包到 NuGet Package 中,因此此处只需要添加 OpenVINO™ C# API 即可使用。下面演示两种不同编译方式情况下的安装:
+
+- **Visual Studio 平台**
+
+  Visual Studio 编辑器自带了 C# 的 **NuGet Package** 管理功能,因此可以直接通过 **NuGet Package** 进行安装。
+
+- **dotnet**
+
+  dotnet 是 C# 语言的编译平台,可以通过命令行快速编译 C# 项目,如果使用 dotnet 编译,可以通过以下方式安装 OpenVINO™ C# API:
+
+```
+dotnet add package OpenVINO.CSharp.win
+```
+
+  **说明:**目前 **.NET Framework 4.8** 版本安装使用会出现问题,因此在项目生成后,需要将程序目录下 openvino2023.0 文件夹中除 **opencv_c.dll** 以外的文件移动到程序目录下,如图所示。
+
+
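+  安装完成后,可以参考下面这个最小示例(仅为示意,假设项目已按上述方式添加 OpenVINO™ C# API 引用,类名以本项目 API 为准)验证运行库能否正常加载,并列出当前可用的推理设备:
+
+```csharp
+using System;
+using System.Collections.Generic;
+using OpenVinoSharp;
+
+class Program
+{
+    static void Main(string[] args)
+    {
+        // 创建 OpenVINO Runtime 的 Core 对象
+        Core core = new Core();
+        // 查询当前环境中可用的推理设备(如 CPU、GPU)
+        List<string> devices = core.get_available_devices();
+        foreach (string device in devices)
+        {
+            Console.WriteLine("可用设备: " + device);
+        }
+    }
+}
+```
+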
+
diff --git a/modules/csharp_api/docs/en/linux_install.md b/modules/csharp_api/docs/en/linux_install.md
new file mode 100644
index 000000000..7757821cb
--- /dev/null
+++ b/modules/csharp_api/docs/en/linux_install.md
@@ -0,0 +1,132 @@
+# Using the OpenVINO C# API on Linux
+
+  Since the OpenVINO C# API is still in the development stage and no corresponding NuGet Package has been generated yet, corresponding use cases are provided here based on the Ubuntu 20.04 system to make it easier to use the OpenVINO C# API on Linux systems.
+
+## Ⅰ. Install .NET
+
+  .NET is a free, cross-platform, open source developer platform for building many kinds of applications. The following demonstrates how to install the .NET environment on the AIxBoard under Ubuntu 20.04; it supports the .NET Core 2.0-3.1 series and the .NET 5-8 series. If your AIxBoard uses another Linux system, you can refer to [Install .NET on Linux distributions - .NET | Microsoft Learn](https://learn.microsoft.com/en-us/dotnet/core/install/linux).
+
+### 1. Add Microsoft Package Repository
+
+  The installation using APT can be completed with a few commands. Before installing .NET, please run the following commands to add the Microsoft package signing key to the trusted key list and add the package repository.
+
+  Open the terminal and run the following commands:
+
+```bash
+wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
+sudo dpkg -i packages-microsoft-prod.deb
+rm packages-microsoft-prod.deb
+```
+
+  The following figure shows the output of the console after entering the above command:
+
+
+
+### 2. Install SDK
+
+  The .NET SDK allows you to develop applications with .NET. If you install the .NET SDK, you do not need to install the corresponding runtime separately. To install the .NET SDK, run the following commands:
+
+```bash
+sudo apt-get update
+sudo apt-get install -y dotnet-sdk-3.1
+```
+
+  The following figure shows the output of the console after entering the above command:
+
+
+ + +### 3. Test installation + +  You can check the SDK version and runtime version through the command line. + +``` +dotnet --list-sdks +dotnet --list-runtimes +``` + +  The following figure shows the output of the console after entering the above command: + +
+
+  The above are the configuration steps for the .NET environment. If your environment does not match this article, you can find more installation steps in the [.NET documentation | Microsoft Learn](https://learn.microsoft.com/en-us/dotnet/).
+
+## Ⅱ. Install OpenVINO Runtime
+
+  OpenVINO™ has two installation options: OpenVINO Runtime and OpenVINO Development Tools. OpenVINO Runtime contains the core libraries for running model deployment inference on processor devices. OpenVINO Development Tools is a set of tools for working with OpenVINO and OpenVINO models, including the model optimizer, OpenVINO Runtime, the model downloader, and more. Here we only need to install OpenVINO Runtime.
+
+### 1. Download OpenVINO Runtime
+
+  Visit the [Download the Intel Distribution of OpenVINO Toolkit](https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/download.html?ENVIRONMENT=DEV_TOOLS&OP_SYSTEM=WINDOWS&VERSION=v_2023_0_1&DISTRIBUTION=PIP) page and follow the process below to select the corresponding installation options. On the download page, since our device runs **Ubuntu 20.04**, download the package compiled for that version.
+
+
+
+### 2. Unzip the installation package
+
+  The OpenVINO Runtime we downloaded is essentially a C++ dependency package, so we place it under a system directory so that the dependencies can be found at build time through the configured system variables. First, create a folder under the system directory:
+
+```bash
+sudo mkdir -p /opt/intel
+```
+
+  Then extract the installation file we downloaded and move it to the specified folder:
+
+```bash
+tar -xvzf l_openvino_toolkit_ubuntu20_2023.0.1.11005.fa1c41994f3_x86_64.tgz
+sudo mv l_openvino_toolkit_ubuntu20_2023.0.1.11005.fa1c41994f3_x86_64 /opt/intel/openvino_2022.3.0
+```
+
+### 3. Install dependencies
+
+  Next, we need to install the dependencies required by the OpenVINO Runtime. Enter the following commands from the command line:
+
+```bash
+cd /opt/intel/openvino_2022.3.0/
+sudo -E ./install_dependencies/install_openvino_dependencies.sh
+```
+
+
+
+### 4. Configure environment variables
+
+  After the installation is completed, we need to configure the environment variables so that the system can find the required files at run time. Enter the following command from the command line:
+
+```bash
+source /opt/intel/openvino_2022.3.0/setupvars.sh
+```
+
+  The above are the configuration steps for the OpenVINO Runtime environment. If your environment does not match this article, you can find more installation steps in [Install OpenVINO™ Runtime — OpenVINO™ documentation — Version(2023.0)](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_install_runtime.html).
+
+### 5. Add OpenVINO™ C# API Dependency
+
+  Since the OpenVINO™ C# API is currently in the development phase and a Linux version of the NuGet Package has not yet been created, it needs to be used by downloading the project source code and adding it as a project reference.
+
+- **Download source code**
+
+ Download the project source code with Git. Open a new terminal and enter the following commands to clone the remote repository, placing the project in the same parent directory as your own project.
+
+ ```
+ git clone https://github.com/guojin-yan/OpenVINO-CSharp-API.git
+ cd OpenVINO-CSharp-API
+ ```
+
+
+
+- **Modify the OpenVINO™ Dependency**
+
+ Since the OpenVINO™ dependency of the project source code differs from the settings in this article, the path of the OpenVINO™ dependency needs to be modified, mainly by editing the ``OpenVINO-CSharp-API/src/CSharpAPI/native_methods/ov_base.cs`` file. The modification is as follows:
+
+ ```
+ private const string dll_extern = "./openvino2023.0/openvino_c.dll";
+ ---Modify to--->
+ private const string dll_extern = "libopenvino_c.so";
+ ```
+
+- **Add Project Dependency**
+
+ Enter the following command in the terminal to add the OpenVINO™ C# API to the AlxBoard_deploy_yolov8 project references.
+
+ ```
+ dotnet add reference ./../OpenVINO-CSharp-API/src/CSharpAPI/CSharpAPI.csproj
+ ```
+
diff --git a/modules/csharp_api/docs/en/windows_install.md b/modules/csharp_api/docs/en/windows_install.md
new file mode 100644
index 000000000..acc8a225f
--- /dev/null
+++ b/modules/csharp_api/docs/en/windows_install.md
@@ -0,0 +1,32 @@
+# Using the OpenVINO™ C# API on Windows
+
+  The OpenVINO™ C# API is developed based on OpenVINO™ and C#. It supports Windows 10/11 and has been tested on the x64 architecture.
+
+## C# Environment Configuration
+
+  C# is a modern programming language that is both object-oriented and type-safe. Developers can use C# to build secure and reliable applications that run on .NET. The C# environment can be configured by following the articles below.
+
+- [Install .NET on Windows - .NET | Microsoft Learn](https://learn.microsoft.com/en-us/dotnet/core/install/windows?tabs=net70)
+
+- [.NET Framework installation guide - .NET Framework | Microsoft Learn](https://learn.microsoft.com/en-us/dotnet/framework/install/)
+
+## OpenVINO™ C# API Installation
+
+  Since developing in C# on Windows is convenient, a NuGet Package of the OpenVINO™ C# API has been developed, and it can be installed directly through the C# NuGet Package manager. When the NuGet Package is built, the officially compiled OpenVINO™ dynamic link library files are packaged into it, so you only need to add the OpenVINO™ C# API package to use it. The following demonstrates the installation under two different build approaches:
+
+- **Visual Studio Platform**
+
+  The Visual Studio editor comes with the C# **NuGet Package** management feature, so the package can be installed directly through **NuGet Package** management.
+
+- **dotnet**
+
+  dotnet is the build platform for the C# language and can quickly compile C# projects from the command line. If you build with dotnet, the OpenVINO™ C# API can be installed with:
+
+```
+dotnet add package OpenVINO.CSharp.win
+```
+
+  **Note:** Currently there may be issues when installing and using the package with **.NET Framework 4.8**. Therefore, after the project is built, it is necessary to move the **opencv_c.dll** file from the openvino2023.0 folder in the program directory to the program directory, as shown in the figure:
+
+
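+  After installation, a minimal console sketch such as the following (for illustration only; it assumes the OpenVINO.CSharp.win package has been added as described above and uses the class names of this API) can be used to check that the native libraries load correctly and to list the available inference devices:
+
+```csharp
+using System;
+using System.Collections.Generic;
+using OpenVinoSharp;
+
+class Program
+{
+    static void Main(string[] args)
+    {
+        // Create the OpenVINO Runtime Core object.
+        Core core = new Core();
+        // Query the inference devices available in the current environment (e.g. CPU, GPU).
+        List<string> devices = core.get_available_devices();
+        foreach (string device in devices)
+        {
+            Console.WriteLine("Available device: " + device);
+        }
+    }
+}
+```
+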
+ From b5b7ba16735cdbd4edf37831058365d2cbbb3431 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Sat, 7 Oct 2023 17:00:44 +0800 Subject: [PATCH 02/40] Modify the Chinese language in the source code and add exception handling in the code. --- modules/csharp_api/csharp/CSharpAPI.csproj | 31 +++------ .../csharp/build/OpenVINO.CSharp.win.targets | 4 +- .../csharp/build/openvino2023.1/plugins.xml | 6 ++ modules/csharp_api/csharp/common/common.cs | 4 ++ .../csharp_api/csharp/core/compiled_model.cs | 6 +- modules/csharp_api/csharp/core/core.cs | 7 +- .../csharp_api/csharp/core/infer_request.cs | 8 +-- modules/csharp_api/csharp/core/model.cs | 10 ++- modules/csharp_api/csharp/core/node.cs | 6 +- modules/csharp_api/csharp/core/node_input.cs | 2 +- modules/csharp_api/csharp/core/node_output.cs | 2 +- .../csharp_api/csharp/core/partial_shape.cs | 6 +- modules/csharp_api/csharp/core/shape.cs | 10 +-- modules/csharp_api/csharp/core/tensor.cs | 7 +- .../csharp/exception/handle_exception.cs | 24 +++++-- .../csharp/native_methods/ov_base.cs | 2 +- .../csharp/preprocess/input_info.cs | 30 +++------ .../csharp/preprocess/input_model_info.cs | 16 ++--- .../csharp/preprocess/input_tensor_info.cs | 64 ++++++------------- .../csharp/preprocess/output_info.cs | 16 ++--- .../csharp/preprocess/output_tensor_info.cs | 16 ++--- .../csharp/preprocess/prepost_processor.cs | 63 ++++++------------ .../csharp/preprocess/preprocess_steps.cs | 8 +-- 23 files changed, 145 insertions(+), 203 deletions(-) create mode 100644 modules/csharp_api/csharp/build/openvino2023.1/plugins.xml diff --git a/modules/csharp_api/csharp/CSharpAPI.csproj b/modules/csharp_api/csharp/CSharpAPI.csproj index ab608ca1e..1ea258945 100644 --- a/modules/csharp_api/csharp/CSharpAPI.csproj +++ b/modules/csharp_api/csharp/CSharpAPI.csproj @@ -1,47 +1,34 @@  - - net6.0;net48 + net5.0;net6.0;net48 True True OpenVINO.CSharp.win - + OpenVINO C# API - 3.0.122-test-8 + 3.1.1 Guojin Yan Guojin Yan OpenVINO C# API - 基于C#平台调用OpenVINO套件部署深度学习模型。 -Based on the C # platform, call the OpenVINO suite to deploy a deep learning model. -目前版本为测试版本,会存在相关的问题,待后续更新会修改相应的错误;如有其他问题请联系作者解决。 + Based on the C # platform, call the OpenVINO suite to deploy a deep learning model. + https://github.com/guojin-yan/OpenVINO-CSharp-API https://github.com/guojin-yan/OpenVINO-CSharp-API git ../../nuget zh - NuGet.png - README.md - 该版本为OpenVINO™ C# API 3.0 预发行版本,功能还未完善,如使用中有问题,欢迎与我沟通联系。 -This version is a pre release version of OpenVINO™ C# API 3.0 and its features are not yet fully developed. If there are any issues during use, please feel free to contact me. + This version is a pre release version of OpenVINO™ C# API 3.0 and its features are not yet fully developed. If there are any issues during use, please feel free to contact me. 
OpenVinoSharp OpenVINOCSharp - + - + true - build\openvino2023.0\%(Filename)%(Extension) - - - True - \ - - - True - \ + build\openvino2023.1\%(Filename)%(Extension) diff --git a/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets b/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets index 33800aebc..f8389ae7e 100644 --- a/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets +++ b/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets @@ -1,8 +1,8 @@ - - %(RecursiverDir)openvino2023.0/%(Filename)%(Extension) + + %(RecursiverDir)openvino2023.1/%(Filename)%(Extension) PreserveNewest diff --git a/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml b/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml new file mode 100644 index 000000000..1c833ca03 --- /dev/null +++ b/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/modules/csharp_api/csharp/common/common.cs b/modules/csharp_api/csharp/common/common.cs index 86b426de3..2b6ad211b 100644 --- a/modules/csharp_api/csharp/common/common.cs +++ b/modules/csharp_api/csharp/common/common.cs @@ -89,6 +89,10 @@ public enum ExceptionStatus : int /// UNKNOW_EXCEPTION /// UNKNOW_EXCEPTION = -17, + /// + /// PTR_NULL + /// + PTR_NULL = -100, } /// diff --git a/modules/csharp_api/csharp/core/compiled_model.cs b/modules/csharp_api/csharp/core/compiled_model.cs index 917457782..21facf951 100644 --- a/modules/csharp_api/csharp/core/compiled_model.cs +++ b/modules/csharp_api/csharp/core/compiled_model.cs @@ -16,7 +16,7 @@ namespace OpenVinoSharp /// A model is compiled by a specific device by applying multiple optimization /// transformations, then mapping to compute kernels. /// - public class CompiledModel + public class CompiledModel : IDisposable { /// /// [private]CompiledModel class pointer. @@ -40,12 +40,12 @@ public CompiledModel(IntPtr ptr) /// ~CompiledModel() { - dispose(); + Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 1ac5501cb..31be22ed6 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -14,7 +14,7 @@ namespace OpenVinoSharp /// are created multiple times and not shared between several Core instances.The recommended way is to have /// a single Core instance per application. /// - public class Core + public class Core : IDisposable { /// /// [private]Core class pointer. @@ -70,11 +70,11 @@ public Core(string xml_config_file = null) /// /// Core's destructor /// - ~Core() { dispose(); } + ~Core() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -320,6 +320,7 @@ public List get_available_devices() NativeMethods.ov_available_devices_free(devices_ptr); return devices; } + } } diff --git a/modules/csharp_api/csharp/core/infer_request.cs b/modules/csharp_api/csharp/core/infer_request.cs index 09465db8d..ff351f286 100644 --- a/modules/csharp_api/csharp/core/infer_request.cs +++ b/modules/csharp_api/csharp/core/infer_request.cs @@ -15,7 +15,7 @@ namespace OpenVinoSharp /// This is a class of infer request that can be run in asynchronous or synchronous manners. /// /// ov_runtime_c#_api - public class InferRequest + public class InferRequest : IDisposable { /// /// [private]InferRequest class pointer. 
@@ -40,18 +40,18 @@ public InferRequest(IntPtr ptr) /// ~InferRequest() { - dispose(); + Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { return; } - NativeMethods.ov_core_free(m_ptr); + NativeMethods.ov_infer_request_free(m_ptr); m_ptr = IntPtr.Zero; } diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs index 8d99617ee..2bb355daa 100644 --- a/modules/csharp_api/csharp/core/model.cs +++ b/modules/csharp_api/csharp/core/model.cs @@ -13,7 +13,7 @@ namespace OpenVinoSharp /// /// A user-defined model /// - public class Model + public class Model : IDisposable { /// /// [private]Model class pointer. @@ -39,17 +39,21 @@ public Model(IntPtr ptr) /// /// Model's destructor /// - ~Model() { dispose(); } + ~Model() + { + Dispose(); + } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { return; } NativeMethods.ov_core_free(m_ptr); + m_ptr = IntPtr.Zero; } diff --git a/modules/csharp_api/csharp/core/node.cs b/modules/csharp_api/csharp/core/node.cs index 03ce722f2..542287fd1 100644 --- a/modules/csharp_api/csharp/core/node.cs +++ b/modules/csharp_api/csharp/core/node.cs @@ -12,7 +12,7 @@ namespace OpenVinoSharp /// zero or more nodes as arguments and one value, which is either a tensor /// or a (possibly empty) tuple of values. /// - public class Node + public class Node : IDisposable { /// /// The node type. @@ -57,12 +57,12 @@ public Node(IntPtr ptr, NodeType type) /// Default deconstruction. /// ~Node() { - dispose(); + Dispose(); } /// /// Release unmanaged resources. /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { diff --git a/modules/csharp_api/csharp/core/node_input.cs b/modules/csharp_api/csharp/core/node_input.cs index c3e940d30..73beb7c3e 100644 --- a/modules/csharp_api/csharp/core/node_input.cs +++ b/modules/csharp_api/csharp/core/node_input.cs @@ -33,7 +33,7 @@ public Input(Node node, ulong index) /// Release unmanaged resources. /// public void dispose() { - m_node.dispose(); + m_node.Dispose(); } /// /// Get the node referred to by this input handle. diff --git a/modules/csharp_api/csharp/core/node_output.cs b/modules/csharp_api/csharp/core/node_output.cs index 09e4bd80f..283de79ae 100644 --- a/modules/csharp_api/csharp/core/node_output.cs +++ b/modules/csharp_api/csharp/core/node_output.cs @@ -35,7 +35,7 @@ public Output(Node node, ulong index) /// public void dispose() { - m_node.dispose(); + m_node.Dispose(); } /// /// Get the node referred to by this output handle. diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index 9cfb0368b..f1700d266 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -20,7 +20,7 @@ namespace OpenVinoSharp /// Static rank, and static dimensions on all axes. /// (Informal notation examples: `{1,2,3,4}`, `{6}`, `{}`) /// - public class PartialShape + public class PartialShape : IDisposable { /// /// [private]Core class pointer. @@ -170,12 +170,12 @@ public PartialShape(Shape shape) /// ~PartialShape() { - dispose(); + Dispose(); } /// /// Release unmanaged resources. 
/// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { diff --git a/modules/csharp_api/csharp/core/shape.cs b/modules/csharp_api/csharp/core/shape.cs index c25e3e859..9790fda80 100644 --- a/modules/csharp_api/csharp/core/shape.cs +++ b/modules/csharp_api/csharp/core/shape.cs @@ -15,7 +15,7 @@ namespace OpenVinoSharp /// Shape for a tensor. /// /// ov_runtime_c#_api - public class Shape : List + public class Shape : List, IDisposable { /// /// [struct] The shape ov_shape @@ -81,7 +81,7 @@ public Shape(long[] axis_lengths) int l = Marshal.SizeOf(typeof(ov_shape)); m_ptr = Marshal.AllocHGlobal(l); HandleException.handler( - NativeMethods.ov_shape_create((long)this.Count, ref axis_lengths[0], m_ptr)); + NativeMethods.ov_shape_create((long)this.Count, ref axis_lengths[0], m_ptr)); var temp = Marshal.PtrToStructure(m_ptr, typeof(ov_shape)); shape = (ov_shape)temp; } @@ -90,18 +90,18 @@ public Shape(long[] axis_lengths) /// ~Shape() { - dispose(); + Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { return; } - NativeMethods.ov_core_free(m_ptr); + NativeMethods.ov_shape_free(m_ptr); m_ptr = IntPtr.Zero; } /// diff --git a/modules/csharp_api/csharp/core/tensor.cs b/modules/csharp_api/csharp/core/tensor.cs index e71b8d2fa..f344d9d37 100644 --- a/modules/csharp_api/csharp/core/tensor.cs +++ b/modules/csharp_api/csharp/core/tensor.cs @@ -15,7 +15,7 @@ namespace OpenVinoSharp /// It can throw exceptions safely for the application, where it is properly handled. /// /// ov_runtime_c#_api - public class Tensor + public class Tensor : IDisposable { /// /// [private]Tensor class pointer. @@ -97,18 +97,19 @@ public Tensor(Tensor tensor) /// ~Tensor() { - dispose(); + Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { return; } NativeMethods.ov_tensor_free(m_ptr); + m_ptr = IntPtr.Zero; } /// diff --git a/modules/csharp_api/csharp/exception/handle_exception.cs b/modules/csharp_api/csharp/exception/handle_exception.cs index 83eccefcc..65d53feb3 100644 --- a/modules/csharp_api/csharp/exception/handle_exception.cs +++ b/modules/csharp_api/csharp/exception/handle_exception.cs @@ -17,7 +17,8 @@ static class HandleException /// /// public static void handler(ExceptionStatus status) { - if (ExceptionStatus.OK == status) { + if (ExceptionStatus.OK == status) + { return; } else if (ExceptionStatus.GENERAL_ERROR == status) @@ -28,7 +29,8 @@ public static void handler(ExceptionStatus status) { { not_implemented(); } - else if (ExceptionStatus.NETWORK_NOT_LOADED == status) { + else if (ExceptionStatus.NETWORK_NOT_LOADED == status) + { network_not_loaded(); } else if (ExceptionStatus.PARAMETER_MISMATCH == status) @@ -50,7 +52,9 @@ public static void handler(ExceptionStatus status) { else if (ExceptionStatus.REQUEST_BUSY == status) { request_busy(); - } else if (ExceptionStatus.RESULT_NOT_READY == status) { + } + else if (ExceptionStatus.RESULT_NOT_READY == status) + { result_not_ready(); } else if (ExceptionStatus.NOT_ALLOCATED == status) @@ -85,6 +89,10 @@ public static void handler(ExceptionStatus status) { { unknown_exception(); } + else if (ExceptionStatus.PTR_NULL == status) + { + ptr_null_exception(); + } } /// @@ -163,7 +171,7 @@ private static void request_busy() /// /// Throw RESULT_NOT_READY OpenVINOException. /// - /// result not ready! + /// result not ready! 
private static void result_not_ready() { throw new OVException(ExceptionStatus.RESULT_NOT_READY, "result not ready!"); @@ -232,5 +240,13 @@ private static void unknown_exception() { throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, "unknown exception!"); } + /// + /// Throw PTR_NULL OpenVINOException. + /// + /// + private static void ptr_null_exception() + { + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, "ptr is null!"); + } } } diff --git a/modules/csharp_api/csharp/native_methods/ov_base.cs b/modules/csharp_api/csharp/native_methods/ov_base.cs index 80e115b4e..2ed76b8d6 100644 --- a/modules/csharp_api/csharp/native_methods/ov_base.cs +++ b/modules/csharp_api/csharp/native_methods/ov_base.cs @@ -11,6 +11,6 @@ namespace OpenVinoSharp /// public partial class NativeMethods { - private const string dll_extern = "./openvino2023.0/openvino_c.dll"; + private const string dll_extern = "./openvino2023.1/openvino_c.dll"; } } diff --git a/modules/csharp_api/csharp/preprocess/input_info.cs b/modules/csharp_api/csharp/preprocess/input_info.cs index a9bfb96e5..421e999a4 100644 --- a/modules/csharp_api/csharp/preprocess/input_info.cs +++ b/modules/csharp_api/csharp/preprocess/input_info.cs @@ -13,7 +13,7 @@ namespace OpenVinoSharp.preprocess /// - Preprocessing steps applied to user's input (InputInfo::preprocess) /// - Model's input info, which is a final input's info after preprocessing (InputInfo::model) /// - public class InputInfo + public class InputInfo : IDisposable { /// /// [private]InputInfo class pointer. @@ -41,11 +41,11 @@ public InputInfo(IntPtr ptr) /// /// Default destructor /// - ~InputInfo() { dispose(); } + ~InputInfo() { Dispose(); } /// /// Release unmanaged resources. /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -62,12 +62,8 @@ public void dispose() public InputTensorInfo tensor() { IntPtr input_tensor_ptr = IntPtr.Zero; - ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_tensor_info( - m_ptr, ref input_tensor_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputInfo tensor error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_info_get_tensor_info(m_ptr, ref input_tensor_ptr)); return new InputTensorInfo(input_tensor_ptr); } @@ -78,12 +74,8 @@ public InputTensorInfo tensor() public PreProcessSteps preprocess() { IntPtr preprocess_ptr = IntPtr.Zero; - ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_preprocess_steps( - m_ptr, ref preprocess_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputInfo preprocess error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_info_get_preprocess_steps(m_ptr, ref preprocess_ptr)); return new PreProcessSteps(preprocess_ptr); } @@ -94,12 +86,8 @@ public PreProcessSteps preprocess() public InputModelInfo model() { IntPtr model_ptr = IntPtr.Zero; - ExceptionStatus status = NativeMethods.ov_preprocess_input_info_get_model_info( - m_ptr, ref model_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputInfo preprocess error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_info_get_model_info(m_ptr, ref model_ptr)); return new InputModelInfo(model_ptr); } }; diff --git a/modules/csharp_api/csharp/preprocess/input_model_info.cs b/modules/csharp_api/csharp/preprocess/input_model_info.cs index f6e5e5b2b..d137783e7 100644 --- 
a/modules/csharp_api/csharp/preprocess/input_model_info.cs +++ b/modules/csharp_api/csharp/preprocess/input_model_info.cs @@ -16,7 +16,7 @@ namespace OpenVinoSharp.preprocess /// Support model has input parameter with shape {1, 3, 224, 224} and user needs to resize input image to model's /// dimensions. It can be done like this /// - public class InputModelInfo + public class InputModelInfo : IDisposable { /// /// [private]InputModelInfo class pointer. @@ -36,7 +36,7 @@ public InputModelInfo(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("InputModelInfo init error : ptr is null!"); + HandleException.handler(ExceptionStatus.PTR_NULL); return; } this.m_ptr = ptr; @@ -44,11 +44,11 @@ public InputModelInfo(IntPtr ptr) /// /// Default destructor /// - ~InputModelInfo() { dispose(); } + ~InputModelInfo() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -65,12 +65,8 @@ public void dispose() /// Reference to 'this' to allow chaining with other calls in a builder-like manner public InputModelInfo set_layout(Layout layout) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_model_info_set_layout( - m_ptr, layout.Ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputModelInfo set_layout error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_model_info_set_layout(m_ptr, layout.Ptr)); return this; } } diff --git a/modules/csharp_api/csharp/preprocess/input_tensor_info.cs b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs index 6223ab14a..0d3a9bcba 100644 --- a/modules/csharp_api/csharp/preprocess/input_tensor_info.cs +++ b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs @@ -12,7 +12,7 @@ namespace OpenVinoSharp.preprocess /// model's input parameter. User application can override particular parameters (like 'element_type') according to /// application's data and specify appropriate conversions in pre-processing steps /// - public class InputTensorInfo + public class InputTensorInfo : IDisposable { /// /// [private]InputTensorInfo class pointer. @@ -32,7 +32,7 @@ public InputTensorInfo(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("InputTensorInfo init error : ptr is null!"); + HandleException.handler(ExceptionStatus.PTR_NULL); return; } this.m_ptr = ptr; @@ -40,11 +40,11 @@ public InputTensorInfo(IntPtr ptr) /// /// Default destructor /// - ~InputTensorInfo() { dispose(); } + ~InputTensorInfo() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -68,12 +68,8 @@ public void dispose() /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public InputTensorInfo set_color_format(ColorFormat format) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_color_format( - m_ptr, (uint)format); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_color_format error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_color_format(m_ptr, (uint)format)); return this; } /// @@ -84,12 +80,8 @@ public InputTensorInfo set_color_format(ColorFormat format) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
public InputTensorInfo set_color_format(ColorFormat format, ulong sub_names_size) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname( - m_ptr, (uint)format, sub_names_size); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_color_format error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, sub_names_size)); return this; } @@ -100,12 +92,8 @@ public InputTensorInfo set_color_format(ColorFormat format, ulong sub_names_size /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public InputTensorInfo set_element_type(OvType type) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_element_type( - m_ptr, (uint)type.get_type()); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_element_type error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_element_type(m_ptr, (uint)type.get_type())); return this; } @@ -119,12 +107,8 @@ public InputTensorInfo set_element_type(OvType type) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public InputTensorInfo set_spatial_static_shape(ulong input_height, ulong input_width) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_spatial_static_shape( - m_ptr, input_height, input_width); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_shape error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_spatial_static_shape(m_ptr, input_height, input_width)); return this; } @@ -136,12 +120,8 @@ public InputTensorInfo set_spatial_static_shape(ulong input_height, ulong input_ public InputTensorInfo set_memory_type(string memory_type) { sbyte[] c_mem_type = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(memory_type)); - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_memory_type( - m_ptr, ref c_mem_type[0]); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_shape error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_memory_type(m_ptr, ref c_mem_type[0])); return this; } @@ -152,12 +132,8 @@ public InputTensorInfo set_memory_type(string memory_type) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public InputTensorInfo set_layout(Layout layout) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_layout( - m_ptr, layout.Ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_layout error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_layout(m_ptr, layout.Ptr)); return this; } @@ -177,12 +153,8 @@ public InputTensorInfo set_layout(Layout layout) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
public InputTensorInfo set_from(Tensor runtime_tensor) { - ExceptionStatus status = NativeMethods.ov_preprocess_input_tensor_info_set_from( - m_ptr, runtime_tensor.Ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputTensorInfo set_from error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_input_tensor_info_set_from(m_ptr, runtime_tensor.Ptr)); return this; } } diff --git a/modules/csharp_api/csharp/preprocess/output_info.cs b/modules/csharp_api/csharp/preprocess/output_info.cs index 31e7e6c85..c9bb4b1d3 100644 --- a/modules/csharp_api/csharp/preprocess/output_info.cs +++ b/modules/csharp_api/csharp/preprocess/output_info.cs @@ -13,7 +13,7 @@ namespace OpenVinoSharp.preprocess /// - Postprocessing steps applied to user's input (OutputInfo::postprocess) /// - User's desired output parameter information, which is a final one after preprocessing (OutputInfo::tensor) /// - public class OutputInfo + public class OutputInfo : IDisposable { /// /// [private]OutputInfo class pointer. @@ -33,7 +33,7 @@ public OutputInfo(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("OutputInfo init error : ptr is null!"); + HandleException.handler(ExceptionStatus.PTR_NULL); return; } this.m_ptr = ptr; @@ -41,11 +41,11 @@ public OutputInfo(IntPtr ptr) /// /// Default destructor /// - ~OutputInfo() { dispose(); } + ~OutputInfo() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -62,12 +62,8 @@ public void dispose() public OutputTensorInfo tensor() { IntPtr output_tensor_ptr = IntPtr.Zero; - ExceptionStatus status = NativeMethods.ov_preprocess_output_info_get_tensor_info( - m_ptr, ref output_tensor_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("InputInfo tensor error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_output_info_get_tensor_info(m_ptr, ref output_tensor_ptr)); return new OutputTensorInfo(output_tensor_ptr); } } diff --git a/modules/csharp_api/csharp/preprocess/output_tensor_info.cs b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs index b2f47e8c8..0366949ce 100644 --- a/modules/csharp_api/csharp/preprocess/output_tensor_info.cs +++ b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs @@ -11,7 +11,7 @@ namespace OpenVinoSharp.preprocess /// (type/shape/etc) as model's output parameter. User application can override particular parameters (like /// 'element_type') according to application's data and specify appropriate conversions in post-processing steps /// - public class OutputTensorInfo + public class OutputTensorInfo : IDisposable { /// /// [private]OutputTensorInfo class pointer. @@ -31,7 +31,7 @@ public OutputTensorInfo(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("OutputTensorInfo init error : ptr is null!"); + HandleException.handler(ExceptionStatus.PTR_NULL); return; } this.m_ptr = ptr; @@ -39,11 +39,11 @@ public OutputTensorInfo(IntPtr ptr) /// /// Default destructor /// - ~OutputTensorInfo() { dispose(); } + ~OutputTensorInfo() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { @@ -60,12 +60,8 @@ public void dispose() /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
public OutputTensorInfo set_element_type(ElementType type) { - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_output_set_element_type( - m_ptr, (uint)type); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("OutputTensorInfo set_element_type error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_output_set_element_type(m_ptr, (uint)type)); return this; } } diff --git a/modules/csharp_api/csharp/preprocess/prepost_processor.cs b/modules/csharp_api/csharp/preprocess/prepost_processor.cs index b201c2096..30c1ef2dc 100644 --- a/modules/csharp_api/csharp/preprocess/prepost_processor.cs +++ b/modules/csharp_api/csharp/preprocess/prepost_processor.cs @@ -18,7 +18,7 @@ namespace OpenVinoSharp.preprocess /// For advanced preprocessing scenarios, like combining several functions with multiple inputs/outputs into one, /// client's code can use transformation passes over ov::Model /// - public class PrePostProcessor + public class PrePostProcessor : IDisposable { /// /// [private]PrePostProcessor class pointer. @@ -36,20 +36,17 @@ public class PrePostProcessor /// model. public PrePostProcessor(Model model) { - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_create(model.Ptr, ref m_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor init error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_create(model.Ptr, ref m_ptr)); } /// /// Default destructor /// - ~PrePostProcessor() { dispose(); } + ~PrePostProcessor() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() { + public void Dispose() { if (m_ptr == IntPtr.Zero) { return; @@ -67,11 +64,8 @@ public void dispose() { public InputInfo input() { IntPtr input_ptr = IntPtr.Zero; - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info(m_ptr, ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_input_info(m_ptr, ref input_ptr)); return new InputInfo(input_ptr); } @@ -84,11 +78,8 @@ public InputInfo input(string tensor_name) { IntPtr input_ptr = IntPtr.Zero; sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr)); return new InputInfo(input_ptr); } /// @@ -99,11 +90,8 @@ public InputInfo input(string tensor_name) public InputInfo input(ulong tensor_index) { IntPtr input_ptr = IntPtr.Zero; - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_index(m_ptr, tensor_index, ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor input error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_input_info_by_index(m_ptr, tensor_index, ref input_ptr)); return new InputInfo(input_ptr); } @@ -116,11 +104,8 @@ public InputInfo input(ulong 
tensor_index) public OutputInfo output() { IntPtr input_ptr = IntPtr.Zero; - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info(m_ptr, ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_output_info(m_ptr, ref input_ptr)); return new OutputInfo(input_ptr); } @@ -133,11 +118,8 @@ public OutputInfo output(string tensor_name) { IntPtr input_ptr = IntPtr.Zero; sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(tensor_name)); - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_name(m_ptr, ref c_tensor_name[0], ref input_ptr)); return new OutputInfo(input_ptr); } @@ -149,11 +131,8 @@ public OutputInfo output(string tensor_name) public OutputInfo output(ulong tensor_index) { IntPtr input_ptr = IntPtr.Zero; - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_index(m_ptr, tensor_index, ref input_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor output error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_get_output_info_by_index(m_ptr, tensor_index, ref input_ptr)); return new OutputInfo(input_ptr); } @@ -164,12 +143,8 @@ public OutputInfo output(ulong tensor_index) public Model build() { IntPtr model_ptr = IntPtr.Zero; - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_prepostprocessor_build( - m_ptr, ref model_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PrePostProcessor build error : " + status.ToString()); - } + HandleException.handler( + NativeMethods.ov_preprocess_prepostprocessor_build(m_ptr, ref model_ptr)); return new Model(model_ptr); } } diff --git a/modules/csharp_api/csharp/preprocess/preprocess_steps.cs b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs index 6d6d0746b..3c90e1fee 100644 --- a/modules/csharp_api/csharp/preprocess/preprocess_steps.cs +++ b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs @@ -11,7 +11,7 @@ namespace OpenVinoSharp.preprocess /// Preprocessing steps. Each step typically intends adding of some operation to input parameter /// User application can specify sequence of preprocessing steps in a builder-like manner /// - public class PreProcessSteps + public class PreProcessSteps : IDisposable { /// /// [private]PreProcessSteps class pointer. 
@@ -31,7 +31,7 @@ public PreProcessSteps(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("PreProcessSteps init error : ptr is null!"); + HandleException.handler(ExceptionStatus.PTR_NULL); return; } this.m_ptr = ptr; @@ -39,11 +39,11 @@ public PreProcessSteps(IntPtr ptr) /// /// Default destructor /// - ~PreProcessSteps() { dispose(); } + ~PreProcessSteps() { Dispose(); } /// /// Release unmanaged resources /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { From 14404496c098641547c1cab50c609df478a92ab5 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 19 Oct 2023 10:58:58 +0800 Subject: [PATCH 03/40] Modify Dimension. is_ dynamic() judgment method, --- modules/csharp_api/csharp/core/dimension.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/csharp_api/csharp/core/dimension.cs b/modules/csharp_api/csharp/core/dimension.cs index afe6176ff..7d7f4e56f 100644 --- a/modules/csharp_api/csharp/core/dimension.cs +++ b/modules/csharp_api/csharp/core/dimension.cs @@ -78,7 +78,7 @@ public long get_min() /// Boolean, true is dynamic and false is static. public bool is_dynamic() { - return NativeMethods.ov_dimension_is_dynamic(m_dimension); + return (m_dimension.min == 0 && m_dimension.max == -1) ? true : false; } } } From b97e71e227a31b07cef0ccf9c7162ab4cb566330 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 19 Oct 2023 11:00:26 +0800 Subject: [PATCH 04/40] Optimize and improve the implementation of PartialShape class, and correct errors in PartialShape. --- modules/csharp_api/csharp/core/node.cs | 4 +- .../csharp_api/csharp/core/partial_shape.cs | 94 +++++-------------- .../csharp/native_methods/ov_partial_shape.cs | 10 +- modules/csharp_api/csharp/ov/ov_struct.cs | 23 +---- 4 files changed, 29 insertions(+), 102 deletions(-) diff --git a/modules/csharp_api/csharp/core/node.cs b/modules/csharp_api/csharp/core/node.cs index 542287fd1..94f7ce01f 100644 --- a/modules/csharp_api/csharp/core/node.cs +++ b/modules/csharp_api/csharp/core/node.cs @@ -107,12 +107,10 @@ public Shape get_shape() /// Returns the partial shape. public PartialShape get_partial_shape() { - int l = Marshal.SizeOf(typeof(Ov.ov_partial_shape)); - IntPtr shape_ptr = Marshal.AllocHGlobal(l); Ov.ov_partial_shape shape = new Ov.ov_partial_shape(); HandleException.handler( NativeMethods.ov_port_get_partial_shape(m_ptr, ref shape)); - return new PartialShape(shape_ptr); + return new PartialShape(shape); } /// diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index f1700d266..5740e7e6e 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -1,6 +1,7 @@ using OpenCvSharp; using System; using System.Collections.Generic; +using System.Drawing.Drawing2D; using System.Linq; using System.Runtime.InteropServices; using System.Text; @@ -20,16 +21,8 @@ namespace OpenVinoSharp /// Static rank, and static dimensions on all axes. /// (Informal notation examples: `{1,2,3,4}`, `{6}`, `{}`) /// - public class PartialShape : IDisposable + public class PartialShape { - /// - /// [private]Core class pointer. - /// - private IntPtr m_ptr = IntPtr.Zero; - /// - /// [public]Core class pointer. - /// - public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } @@ -44,37 +37,11 @@ public class PartialShape : IDisposable private Dimension[] dimensions; /// - /// Constructing partial shape by pointer. 
+ /// Constructing partial shape by ov_partial_shape. /// - /// The partial shape ptr./param> - public PartialShape(IntPtr ptr) - { - if (ptr == IntPtr.Zero) - { - System.Diagnostics.Debug.WriteLine("Shape init error : ptr is null!"); - return; - } - this.m_ptr = ptr; - var temp = Marshal.PtrToStructure(ptr, typeof(ov_partial_shape)); - ov_partial_shape shape = (ov_partial_shape)temp; - Dimension rank_tmp = new Dimension(shape.rank); - - if (!rank_tmp.is_dynamic()){ - rank = rank_tmp; - IntPtr[] d_ptr = new IntPtr[rank.get_max()]; - Marshal.Copy(shape.dims, d_ptr, 0, (int)rank.get_min()); - - dimensions = new Dimension[rank.get_min()]; - for (int i = 0; i < rank.get_min(); ++i) - { - var temp1 = Marshal.PtrToStructure(ptr, typeof(Ov.ov_dimension)); - Dimension dim = new Dimension((Ov.ov_dimension)temp1); - dimensions[i] = dim; - } - } - else { - rank = rank_tmp; - } + /// ov_partial_shape struct. + public PartialShape(Ov.ov_partial_shape shape) { + partial_shape_convert(shape); } /// /// Constructing partial shape by dimensions. @@ -82,13 +49,6 @@ public PartialShape(IntPtr ptr) /// The partial shape dimensions array. public PartialShape(Dimension[] dimensions) { - Ov.ov_dimension[] ds = new Ov.ov_dimension[dimensions.Length]; - for (int i = 0; i < dimensions.Length; ++i) - { - ds[i] = dimensions[i].get_dimension(); - } - HandleException.handler( - NativeMethods.ov_partial_shape_create((long)dimensions.Length, ref ds[0], m_ptr)); this.dimensions = dimensions; rank = new Dimension(dimensions.Length, dimensions.Length); } @@ -107,13 +67,6 @@ public PartialShape(List dimensions) : this(dimensions.ToArray()) /// The partial shape dimensions array. public PartialShape(Dimension rank, Dimension[] dimensions) { - Ov.ov_dimension[] ds = new Ov.ov_dimension[dimensions.Length]; - for (int i = 0; i < dimensions.Length; ++i) - { - ds[i] = dimensions[i].get_dimension(); - } - HandleException.handler( - NativeMethods.ov_partial_shape_create_dynamic(rank.get_dimension(), ref ds[0], m_ptr)); this.dimensions = dimensions; this.rank = rank; } @@ -134,8 +87,6 @@ public PartialShape(Dimension rank, List dimensions) : this(rank, dim /// The partial shape dimensions array. public PartialShape(long rank, long[] dimensions) { - HandleException.handler( - NativeMethods.ov_partial_shape_create_static(rank, ref dimensions[0], m_ptr)); this.rank = new Dimension(rank); for (int i = 0; i < dimensions.Length; ++i) { @@ -156,13 +107,10 @@ public PartialShape(long rank, List dimensions) : this(rank, dimensions.To /// The shape public PartialShape(Shape shape) { + Ov.ov_partial_shape partial_shape = new ov_partial_shape(); HandleException.handler( - NativeMethods.ov_shape_to_partial_shape(shape.shape, m_ptr)); - this.rank = new Dimension(shape.Count); - for (int i = 0; i < dimensions.Length; ++i) - { - this.dimensions[i] = new Dimension(shape[i]); - } + NativeMethods.ov_shape_to_partial_shape(shape.shape, out partial_shape)); + partial_shape_convert(partial_shape); } /// @@ -170,21 +118,22 @@ public PartialShape(Shape shape) /// ~PartialShape() { - Dispose(); } /// - /// Release unmanaged resources. + /// Convert partial shape to PartialShape class. 
/// - public void Dispose() + /// ov_partial_shape struct + private void partial_shape_convert(Ov.ov_partial_shape shape) { - if (m_ptr == IntPtr.Zero) + rank = new Dimension(shape.rank); + long[] data = new long[rank.get_max() * 2]; + dimensions = new Dimension[rank.get_max()]; + Marshal.Copy(shape.dims, data, 0, (int)rank.get_max() * 2); + for (int i = 0; i < rank.get_max(); ++i) { - return; + dimensions[i] = new Dimension(data[2 * i], data[2 * i + 1]); } - NativeMethods.ov_partial_shape_free(m_ptr); - m_ptr = IntPtr.Zero; } - /// /// Get ov_partial_shape /// @@ -195,12 +144,13 @@ public ov_partial_shape get_partial_shape() partial_shape.rank = rank.get_dimension(); int l = Marshal.SizeOf(typeof(Ov.ov_dimension)); IntPtr[] ds_ptr = new IntPtr[rank.get_max()]; - for (int i = 0; i < rank.get_max(); ++i) { + for (int i = 0; i < rank.get_max(); ++i) + { IntPtr ptr = Marshal.AllocHGlobal(l); Marshal.StructureToPtr(dimensions[i], ptr, false); ds_ptr[i] = ptr; } - + IntPtr d_ptr = Marshal.AllocHGlobal((int)(l * rank.get_max())); Marshal.Copy(ds_ptr, 0, d_ptr, (int)rank.get_max()); partial_shape.dims = d_ptr; @@ -243,7 +193,7 @@ public bool is_static() { /// are static. /// `false` if this shape is static, else `true`. public bool is_dynamic() { - return NativeMethods.ov_partial_shape_is_dynamic(get_partial_shape()); + return rank.is_dynamic(); } /// diff --git a/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs index 8994a9bf5..c5084b5c4 100644 --- a/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs +++ b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs @@ -27,7 +27,7 @@ public partial class NativeMethods public extern static ExceptionStatus ov_partial_shape_create( long rank, ref Ov.ov_dimension dims, - IntPtr partial_shape_obj); + out Ov.ov_partial_shape partial_shape_obj); /// /// Initialze a partial shape with static rank and dynamic dimension. @@ -48,7 +48,7 @@ public extern static ExceptionStatus ov_partial_shape_create( public extern static ExceptionStatus ov_partial_shape_create_dynamic( Ov.ov_dimension rank, ref Ov.ov_dimension dims, - IntPtr partial_shape_obj); + out Ov.ov_partial_shape partial_shape_obj); /// /// Initialize a partial shape with static rank and static dimension. @@ -62,7 +62,7 @@ public extern static ExceptionStatus ov_partial_shape_create_dynamic( public extern static ExceptionStatus ov_partial_shape_create_static( long rank, ref long dims, - IntPtr partial_shape_obj); + out Ov.ov_partial_shape partial_shape_obj); /// /// Release internal memory allocated in partial shape. @@ -70,7 +70,7 @@ public extern static ExceptionStatus ov_partial_shape_create_static( /// The object's internal memory will be released. [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public extern static void ov_partial_shape_free(IntPtr partial_shape); + public extern static void ov_partial_shape_free(ref Ov.ov_partial_shape partial_shape); /// /// Convert partial shape without dynamic data to a static shape. 
@@ -94,7 +94,7 @@ public extern static ExceptionStatus ov_partial_shape_to_shape( CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_shape_to_partial_shape( Ov.ov_shape shape, - IntPtr partial_shape); + out Ov.ov_partial_shape partial_shape); /// /// Check this partial_shape whether is dynamic diff --git a/modules/csharp_api/csharp/ov/ov_struct.cs b/modules/csharp_api/csharp/ov/ov_struct.cs index 39acdb1a3..8cfe29cad 100644 --- a/modules/csharp_api/csharp/ov/ov_struct.cs +++ b/modules/csharp_api/csharp/ov/ov_struct.cs @@ -29,7 +29,7 @@ public long[] get_dims() return dims; } } - +#pragma warning disable CS1591 /// /// It represents a shape that may be partially or totally dynamic. /// @@ -43,24 +43,6 @@ public long[] get_dims() public struct ov_partial_shape { -#if NET7_0_OR_GREATER || NET6_0_OR_GREATER - /// - /// The rank - /// - public ov_dimension rank; - /// - /// The dimension - /// - public IntPtr dims = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Dimension))); - /// - /// Default Constructor - /// - public ov_partial_shape() - { - rank = new ov_dimension(); - dims = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Dimension))); - } -#else /// /// The rank /// @@ -69,10 +51,7 @@ public ov_partial_shape() /// The dimension /// public IntPtr dims; -#endif } - - /// /// This is a structure interface equal to ov::Rank /// From ae5ba76f3f35a7091cdaf433250160241899ce5b Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 19 Oct 2023 11:00:56 +0800 Subject: [PATCH 05/40] Update code. --- modules/csharp_api/csharp/CSharpAPI.sln | 25 +++++++++++++++++++++++++ modules/csharp_api/csharp/core/core.cs | 2 -- 2 files changed, 25 insertions(+), 2 deletions(-) create mode 100644 modules/csharp_api/csharp/CSharpAPI.sln diff --git a/modules/csharp_api/csharp/CSharpAPI.sln b/modules/csharp_api/csharp/CSharpAPI.sln new file mode 100644 index 000000000..0383d7865 --- /dev/null +++ b/modules/csharp_api/csharp/CSharpAPI.sln @@ -0,0 +1,25 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.6.33829.357 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CSharpAPI", "CSharpAPI.csproj", "{56A1269F-3928-4367-84BE-0EA2877DFED1}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {56A1269F-3928-4367-84BE-0EA2877DFED1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {56A1269F-3928-4367-84BE-0EA2877DFED1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {56A1269F-3928-4367-84BE-0EA2877DFED1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {56A1269F-3928-4367-84BE-0EA2877DFED1}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {79883B4A-4EAE-42DE-A72B-7B6D47685E9A} + EndGlobalSection +EndGlobal diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 31be22ed6..71d6a1476 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -99,7 +99,6 @@ public KeyValuePair get_versions(string device_name) { throw new ArgumentNullException(nameof(device_name)); } - ExceptionStatus status; int l = Marshal.SizeOf(typeof(CoreVersionList)); IntPtr 
ptr_core_version_s = Marshal.AllocHGlobal(l); sbyte[] c_device_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); @@ -111,7 +110,6 @@ public KeyValuePair get_versions(string device_name) CoreVersion core_version = (CoreVersion)temp2; KeyValuePair value = new KeyValuePair(core_version.device_name, core_version.version); NativeMethods.ov_core_versions_free(ptr_core_version_s); - return value; } From 732887c52ae60be37614c4bfc49461bd9c284bbf Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 19 Oct 2023 16:21:49 +0800 Subject: [PATCH 06/40] Simplify the code, remove OpenCvSharp dependencies from the code, and handle common models. --- modules/csharp_api/csharp/core/compiled_model.cs | 3 +-- modules/csharp_api/csharp/core/infer_request.cs | 3 +-- modules/csharp_api/csharp/core/model.cs | 1 - modules/csharp_api/csharp/core/partial_shape.cs | 3 +-- modules/csharp_api/csharp/core/tensor.cs | 6 +++--- modules/csharp_api/csharp/exception/exception.cs | 3 +-- modules/csharp_api/csharp/native_methods/ov_base.cs | 2 +- .../csharp_api/csharp/native_methods/ov_infer_request.cs | 4 +--- 8 files changed, 9 insertions(+), 16 deletions(-) diff --git a/modules/csharp_api/csharp/core/compiled_model.cs b/modules/csharp_api/csharp/core/compiled_model.cs index 21facf951..da28319da 100644 --- a/modules/csharp_api/csharp/core/compiled_model.cs +++ b/modules/csharp_api/csharp/core/compiled_model.cs @@ -1,5 +1,4 @@ -using OpenCvSharp; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; diff --git a/modules/csharp_api/csharp/core/infer_request.cs b/modules/csharp_api/csharp/core/infer_request.cs index ff351f286..31ad5d05d 100644 --- a/modules/csharp_api/csharp/core/infer_request.cs +++ b/modules/csharp_api/csharp/core/infer_request.cs @@ -1,5 +1,4 @@ -using OpenCvSharp; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Numerics; diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs index 2bb355daa..f9266baf0 100644 --- a/modules/csharp_api/csharp/core/model.cs +++ b/modules/csharp_api/csharp/core/model.cs @@ -6,7 +6,6 @@ using System.Text; using System.Threading.Tasks; using System.Xml.Linq; -using static OpenCvSharp.FileStorage; namespace OpenVinoSharp { diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index 5740e7e6e..83769d310 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -1,5 +1,4 @@ -using OpenCvSharp; -using System; +using System; using System.Collections.Generic; using System.Drawing.Drawing2D; using System.Linq; diff --git a/modules/csharp_api/csharp/core/tensor.cs b/modules/csharp_api/csharp/core/tensor.cs index f344d9d37..9d45c54b0 100644 --- a/modules/csharp_api/csharp/core/tensor.cs +++ b/modules/csharp_api/csharp/core/tensor.cs @@ -46,11 +46,11 @@ public Tensor(IntPtr ptr) /// Tensor element type /// Tensor shape /// Image data - public Tensor(element.Type type, Shape shape, OvMat mat) + public Tensor(element.Type type, Shape shape, byte[] mat) { - int l =mat.mat_data.Length; + int l =mat.Length; IntPtr data = Marshal.AllocHGlobal(l); - Marshal.Copy(mat.mat_data, 0, data, (int)mat.mat_data_size); + Marshal.Copy(mat, 0, data, (int)mat.Length); HandleException.handler( NativeMethods.ov_tensor_create_from_host_ptr ((uint)type.get_type(), shape.shape, data, ref m_ptr)); diff --git 
a/modules/csharp_api/csharp/exception/exception.cs b/modules/csharp_api/csharp/exception/exception.cs index fcee37d34..83e081b1b 100644 --- a/modules/csharp_api/csharp/exception/exception.cs +++ b/modules/csharp_api/csharp/exception/exception.cs @@ -1,5 +1,4 @@ -using OpenCvSharp; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Runtime.Serialization; diff --git a/modules/csharp_api/csharp/native_methods/ov_base.cs b/modules/csharp_api/csharp/native_methods/ov_base.cs index 2ed76b8d6..b53dc3a86 100644 --- a/modules/csharp_api/csharp/native_methods/ov_base.cs +++ b/modules/csharp_api/csharp/native_methods/ov_base.cs @@ -11,6 +11,6 @@ namespace OpenVinoSharp /// public partial class NativeMethods { - private const string dll_extern = "./openvino2023.1/openvino_c.dll"; + private const string dll_extern = "openvino_c"; } } diff --git a/modules/csharp_api/csharp/native_methods/ov_infer_request.cs b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs index 00a66f843..c8c0de851 100644 --- a/modules/csharp_api/csharp/native_methods/ov_infer_request.cs +++ b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs @@ -1,12 +1,10 @@ -using OpenCvSharp; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; using System.Runtime.InteropServices.ComTypes; using System.Text; using System.Threading.Tasks; -using static OpenCvSharp.Stitcher; namespace OpenVinoSharp { From 6af235c0fbfd3331939c55b557459ec530f76828 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 11:10:17 +0800 Subject: [PATCH 07/40] Modify formatting issues in the code. --- modules/csharp_api/csharp/common/element_type.cs | 2 -- modules/csharp_api/csharp/common/version.cs | 11 +++++------ modules/csharp_api/csharp/core/core.cs | 2 -- modules/csharp_api/csharp/core/layout.cs | 6 +++--- modules/csharp_api/csharp/core/node.cs | 3 ++- modules/csharp_api/csharp/core/node_output.cs | 7 ++++--- modules/csharp_api/csharp/core/partial_shape.cs | 8 ++------ 7 files changed, 16 insertions(+), 23 deletions(-) diff --git a/modules/csharp_api/csharp/common/element_type.cs b/modules/csharp_api/csharp/common/element_type.cs index dc20b464c..cefe0f5c3 100644 --- a/modules/csharp_api/csharp/common/element_type.cs +++ b/modules/csharp_api/csharp/common/element_type.cs @@ -35,7 +35,6 @@ public OvType(string type) : base (type) { } }; namespace element { - /// /// Enum to define possible element types /// ov_element_c#_api @@ -115,7 +114,6 @@ public enum Type_t /// u64 }; - /// /// [struct] Type information storage struct. /// diff --git a/modules/csharp_api/csharp/common/version.cs b/modules/csharp_api/csharp/common/version.cs index 0fafe4f0d..056c6a95a 100644 --- a/modules/csharp_api/csharp/common/version.cs +++ b/modules/csharp_api/csharp/common/version.cs @@ -21,7 +21,11 @@ public struct Version /// A null terminated description string /// public string description; - + /// + /// Constructs a Version. 
+ /// + /// + /// public Version(string buildNumber, string description) { this.buildNumber = buildNumber; this.description = description; @@ -67,9 +71,4 @@ public struct CoreVersionList /// public ulong size; } - - - - - } diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 71d6a1476..0c2995217 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -257,8 +257,6 @@ public CompiledModel compile_model(string model_path) NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); return new CompiledModel(compiled_model_ptr); } - - /// /// Reads a model and creates a compiled model from the IR/ONNX/PDPD file. /// diff --git a/modules/csharp_api/csharp/core/layout.cs b/modules/csharp_api/csharp/core/layout.cs index df3679be5..02b305065 100644 --- a/modules/csharp_api/csharp/core/layout.cs +++ b/modules/csharp_api/csharp/core/layout.cs @@ -25,7 +25,7 @@ namespace OpenVinoSharp /// - To set/get model's batch (see `ov::get_batch`/`ov::set_batch') it is required in general to specify 'N' dimension /// in layout for appropriate inputs /// - public class Layout + public class Layout : IDisposable { /// /// [private]Layout class pointer. @@ -66,13 +66,13 @@ public Layout(string layout_desc) /// ~Layout() { - dispose(); + Dispose(); } /// /// Release unmanaged resources. /// - public void dispose() + public void Dispose() { if (m_ptr == IntPtr.Zero) { diff --git a/modules/csharp_api/csharp/core/node.cs b/modules/csharp_api/csharp/core/node.cs index 94f7ce01f..25f7e1faf 100644 --- a/modules/csharp_api/csharp/core/node.cs +++ b/modules/csharp_api/csharp/core/node.cs @@ -56,7 +56,8 @@ public Node(IntPtr ptr, NodeType type) /// /// Default deconstruction. /// - ~Node() { + ~Node() + { Dispose(); } /// diff --git a/modules/csharp_api/csharp/core/node_output.cs b/modules/csharp_api/csharp/core/node_output.cs index 283de79ae..749814fca 100644 --- a/modules/csharp_api/csharp/core/node_output.cs +++ b/modules/csharp_api/csharp/core/node_output.cs @@ -10,7 +10,8 @@ namespace OpenVinoSharp /// A handle for one of a node's outputs. /// /// - public class Output { + public class Output : IDisposable + { private Node m_node; private ulong m_index = 0; /// @@ -28,12 +29,12 @@ public Output(Node node, ulong index) /// ~Output() { - dispose(); + Dispose(); } /// /// Release unmanaged resources. /// - public void dispose() + public void Dispose() { m_node.Dispose(); } diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index 83769d310..16c429779 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -22,9 +22,6 @@ namespace OpenVinoSharp /// public class PartialShape { - - - /// /// PartialShape rank. /// @@ -34,7 +31,6 @@ public class PartialShape /// PartialShape dimensions. /// private Dimension[] dimensions; - /// /// Constructing partial shape by ov_partial_shape. /// @@ -77,7 +73,6 @@ public PartialShape(Dimension rank, Dimension[] dimensions) /// The partial shape dimensions list. public PartialShape(Dimension rank, List dimensions) : this(rank, dimensions.ToArray()) { - } /// /// Constructing static partial shape by dimensions. @@ -98,7 +93,8 @@ public PartialShape(long rank, long[] dimensions) /// The partial shape rank. /// The partial shape dimensions list. 
public PartialShape(long rank, List dimensions) : this(rank, dimensions.ToArray()) - {} + { + } /// /// Constructing static partial shape by shape. From 35126ccc91b9f4b236adc32ce01c3a68f7f66afe Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 11:18:32 +0800 Subject: [PATCH 08/40] Update READM.md and add a new version of NuGet Package --- modules/csharp_api/README.md | 94 +++++++++++++++++++++++++-------- modules/csharp_api/README_cn.md | 94 ++++++++++++++++++++++++++------- 2 files changed, 147 insertions(+), 41 deletions(-) diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md index 0a068cf7e..270ea2e4b 100644 --- a/modules/csharp_api/README.md +++ b/modules/csharp_api/README.md @@ -1,9 +1,18 @@ -# OpenVINO™ C# API +![OpenVINO™ C# API](https://socialify.git.ci/guojin-yan/OpenVINO-CSharp-API/image?description=1&descriptionEditable=💞%20OpenVINO%20wrapper%20for%20.NET💞%20&forks=1&issues=1&logo=https%3A%2F%2Fs2.loli.net%2F2023%2F01%2F26%2FylE1K5JPogMqGSW.png&name=1&owner=1&pattern=Circuit%20Board&pulls=1&stargazers=1&theme=Light) - +


[简体中文](README_cn.md) | English +## This is OpenVINO ™ C # API, this project is still under construction and its functions are not yet fully developed. If you have any problems using it, please feel free to communicate with me. If you are interested in this project, you can also join our development.🥰🥰🥰🥰🥰 + ## 📚 What is OpenVINO™ C# API ? [OpenVINO™](www.openvino.ai) is an open-source toolkit for optimizing and deploying AI inference. @@ -12,17 +21,25 @@ - Use models trained with popular frameworks like TensorFlow, PyTorch and more - Reduce resource demands and efficiently deploy on a range of Intel® platforms from edge to cloud -  This project is mainly based on OpenVINO ™ OpenVINO launched by tool kit ™ C # API, aimed at driving OpenVINO ™ Application on the C # platform. +  This project is based on OpenVINO™ The tool kit has launched OpenVINO™ C # API, aimed at driving OpenVINO™ Application in the C # field. OpenVINO ™ The C # API is based on OpenVINO™ Development, supported platforms, and OpenVINO ™ Consistent, please refer to OpenVINO™ for specific information。 + +## NuGet Package + +### Managed libraries + +| Package | Description | Link | +| --------------------------- | ------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | +| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) | -  OpenVINO ™ The C # API is based on OpenVINO ™ C API development, supported platforms and OpenVINO ™ Consistent, please refer to OpenVINO for specific information ™。 +### Native bindings -## NuGet Package +| Package | Description | Link | +| ------------------------ | --------------------------- | ------------------------------------------------------------ | +| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| | | | -  C # supports NuGet Package installation and one-stop installation on platforms such as Linux and Window. Therefore, in order to facilitate more users, a NuGet Package for use on the Window platform has been released for the convenience of everyone. -| Package | Description | Link | -| ----------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | -| **OpenVINO.CSharp.win** | OpenVINO™ C# API core libraries,comes with a complete OpenVINO 2023.0 dependency library | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.win.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.win/) | ## ⚙ How to install OpenVINO™ C# API? @@ -35,34 +52,69 @@ The following article provides installation methods for OpenVINO™ C# API on di ## 🏷How to use OpenVINO™ C# API? - **Quick start** - - [Deploying the Yolov8 full series model using OpenVINO™ C# API](demos/yolov8/README.md) - + - [Deploying the Yolov8 full series model using OpenVINO™ C# API](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/demos/yolov8/README.md) - **Simple usage** If you don't know how to use it, simply understand the usage method through the following code. 
```c# +using OpenVINO™ C# API; namespace test { internal class Program { static void Main(string[] args) { - Core core = new Core(); - Model model = core.read_model("./model.xml"); - CompiledModel compiled_model = core.compiled_model(model, "AUTO"); - InferRequest infer_request = compiled_model.create_infer_request(); - Tensor input_tensor = infer_request.get_tensor("images"); - infer_request.infer(); - Tensor output_tensor = infer_request.get_tensor("output0"); - core.free(); + using Core core = new Core(); + using Model model = core.read_model("./model.xml"); + using CompiledModel compiled_model = core.compiled_model(model, "AUTO"); + using InferRequest infer_request = compiled_model.create_infer_request(); + using Tensor input_tensor = infer_request.get_tensor("images"); + infer_request.infer(); + using Tensor output_tensor = infer_request.get_tensor("output0"); } } } ``` -The classes and objects encapsulated in the project, such as Core, Model, Tensor, etc., are implemented by calling the C API interface and have unmanaged resources. They need to be handled by calling the **dispose() ** method, otherwise memory leakage may occur. +The classes and objects encapsulated in the project, such as Core, Model, Tensor, etc., are implemented by calling the C API interface and have unmanaged resources. They need to be handled by calling the **Dispose() ** method or `using` statement, otherwise memory leakage may occur. + +## 💻 Tutorial Examples + +- [Using OpenVINO™ C# API to Deploy the Yolov8 Model on the AIxBoard](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/AlxBoard_deploy_yolov8/README.md) +- [Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/PP-Human_Fall_Detection/README.md) +- [Deploying RT-DETR based on OpenVINO](https://github.com/guojin-yan/RT-DETR-OpenVINO) ## 🗂 API Reference -If you want to learn more information, you can refer to: [OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/) +If you want to learn more information, you can refer to: [OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/index.html) + +## 🔃 Update log + +#### 🔥 **2023.10.22 :Update OpenVINO™ C# API ** + +- 🗳 **OpenVINO™ C# API :** + - Modify OpenVINO™ errors in the C # API, and integration of code sections to add exception handling mechanisms. +- 🛹**Application Cases:** + - Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API + - Deploying RT-DETR based on OpenVINO +- 🔮 **NuGet:** + - Abolish the previously released NuGet package, release updated installation packages, and release three types of NuGet packages, including **OpenVINO. CSharp. API **: core code package, **OpenVINO. CSharp. Windows **: Windows platform integration package, and **OpenVINO. runtime. win **: Windows platform runtime package. + +#### **2023.6.19 : release OpenVINO™ C# API 3.0** + +- 🗳OpenVINO™ C# API : + - Upgrade OpenVINO™ C# API 2.0 to OpenVINO™ C# API 3.0, changing from refactoring the C++API to directly reading OpenVino ™ The official C API makes the application more flexible and supports a richer range of functions. +- 🛹Application Cases: + - OpenVINO™ C# API Deployment Yolov8 Model Example。 +- 🔮NuGet: + - Create and publish NuGet package, release * * OpenVINO™ C# API. win 3.0.120 * *, including OpenVino 2023.0 dependencies. 
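For the simple usage above, a minimal sketch of the explicit `Dispose()` alternative mentioned in this section, for projects that cannot use C# 8 `using` declarations; the method names follow the snippet above and the rest is assumed:

```c#
using OpenVinoSharp;

namespace test
{
    internal class Program
    {
        static void Main(string[] args)
        {
            Core core = new Core();
            Model model = core.read_model("./model.xml");
            CompiledModel compiled_model = core.compiled_model(model, "AUTO");
            InferRequest infer_request = compiled_model.create_infer_request();
            Tensor input_tensor = infer_request.get_tensor("images");
            infer_request.infer();
            Tensor output_tensor = infer_request.get_tensor("output0");
            // Release the unmanaged handles explicitly instead of relying on `using`.
            output_tensor.Dispose();
            input_tensor.Dispose();
            infer_request.Dispose();
            compiled_model.Dispose();
            model.Dispose();
            core.Dispose();
        }
    }
}
```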
+ +## 🎖 Contribute + +  If you are interested in OpenVINO ™ Interested in using C # and contributing to the open source community, welcome to join us and develop OpenVINO™ C# API together. +  If you have any ideas or improvement ideas for this project, please feel free to contact us for guidance on our work. + +## License + +The release of this project is certified under the [Apache 2.0 license](https://github.com/guojin-yan/OpenVINO™ C# API/blob/OpenVINO™ C# API3.0/LICENSE) . diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md index 67a77952a..4c2297238 100644 --- a/modules/csharp_api/README_cn.md +++ b/modules/csharp_api/README_cn.md @@ -1,10 +1,19 @@ -# OpenVINO™ C# API - - +![OpenVINO™ C# API](https://socialify.git.ci/guojin-yan/OpenVINO-CSharp-API/image?description=1&descriptionEditable=💞%20OpenVINO%20wrapper%20for%20.NET💞%20&forks=1&issues=1&logo=https%3A%2F%2Fs2.loli.net%2F2023%2F01%2F26%2FylE1K5JPogMqGSW.png&name=1&owner=1&pattern=Circuit%20Board&pulls=1&stargazers=1&theme=Light) +

+ + + + + + 简体中文| [English](README.md) +## 这是OpenVINO™ C# API,该项目还在建设中,功能还未完善,如使用中有问题,欢迎与我沟通联系。如果对该项目感兴趣,也可以加入到我们的开发中来。🥰🥰🥰🥰 + + + ## 📚 简介 [OpenVINO™ ](www.openvino.ai)是一个用于优化和部署 AI 推理的开源工具包。 @@ -13,17 +22,25 @@ - 使用流行框架(如TensorFlow,PyTorch等)训练的模型 - 减少资源需求,并在从边缘到云的一系列英特尔®平台上高效部署 -  该项目主要是基于OpenVINO™工具套件推出的 OpenVINO™ C# API,旨在推动 OpenVINO™ 在C#平台的应用。 +  该项目基于OpenVINO™工具套件推出了 OpenVINO™ C# API,旨在推动 OpenVINO™在C#领域的应用。OpenVINO™ C# API 由于是基于 OpenVINO™ 开发,所支持的平台与OpenVINO™ 一致,具体信息可以参考 OpenVINO™。 + +## NuGet 包 -  OpenVINO™ C# API 由于是基于 OpenVINO™ C API 开发,所支持的平台与OpenVINO™ 一致,具体信息可以参考 OpenVINO™。 +### Managed libraries -## NuGet Package +| Package | Description | Link | +| --------------------------- | ------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | +| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) | + +### Native bindings + +| Package | Description | Link | +| ------------------------ | --------------------------- | ------------------------------------------------------------ | +| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| | | | -C# 支持 NuGet Package 方式安装程序包,在Linux、Window 等平台支持一站式安装使用,因此为了方便更多用户使用,目前发行了 Window 平台下使用的 NuGet Package ,方便大家使用。 -| Package | Description | Link | -| ----------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | -| **OpenVINO.CSharp.win** | OpenVINO™ C# API core libraries,附带完整的OpenVINO 2023.1依赖库 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.win.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.win/) | ## ⚙ 如何安装 @@ -37,37 +54,74 @@ C# 支持 NuGet Package 方式安装程序包,在Linux、Window 等平台支 - **快速体验** - [使用OpenVINO™ C# API部署Yolov8全系列模型](demos/yolov8/README_cn.md) + [使用OpenVINO™ C# API部署Yolov8全系列模型](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/demos/yolov8/README_cn.md) - **使用方法** 如果你不知道如何使用,通过下面代码简单了解使用方法。 ```c# +using OpenVinoSharp; // 引用命名空间 namespace test { internal class Program { static void Main(string[] args) { - Core core = new Core(); // 初始化 Core 核心 - Model model = core.read_model("./model.xml"); // 读取模型文件 - CompiledModel compiled_model = core.compiled_model(model, "AUTO"); // 将模型加载到设备 - InferRequest infer_request = compiled_model.create_infer_request(); // 创建推理通道 - Tensor input_tensor = infer_request.get_tensor("images"); // 获取输入节点Tensor + using Core core = new Core(); // 初始化 Core 核心 + using Model model = core.read_model("./model.xml"); // 读取模型文件 + using CompiledModel compiled_model = core.compiled_model(model, "AUTO"); // 将模型加载到设备 + using InferRequest infer_request = compiled_model.create_infer_request(); // 创建推理通道 + using Tensor input_tensor = infer_request.get_tensor("images"); // 获取输入节点Tensor infer_request.infer(); // 模型推理 - Tensor output_tensor = infer_request.get_tensor("output0"); // 获取输出节点Tensor - core.free(); // 清理 Core 非托管内存 + using Tensor output_tensor = infer_request.get_tensor("output0"); // 获取输出节点Tensor } } } ``` -项目中所封装的类、对象例如Core、Model、Tensor等,通过调用 C api 接口实现,具有非托管资源,需要调用**dispose()**方法处理,否则就会出现内存泄漏。 
+项目中所封装的类、对象例如Core、Model、Tensor等,通过调用 C api 接口实现,具有非托管资源,需要调用**Dispose()**方法处理或者使用**using**,否则就会出现内存泄漏。 + +## 💻 应用案例 + +- [爱克斯开发板使用OpenVINO™ C# API部署Yolov8模型](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/AlxBoard_deploy_yolov8/README_cn.md) +- [行人摔倒检测 — 基于 OpenVINO C# API 部署PP-Human](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/PP-Human_Fall_Detection/README_cn.md) +- [基于 OpenVINO 部署 RT-DETR](https://github.com/guojin-yan/RT-DETR-OpenVINO) ## 🗂 API 文档 -如果想了解更多信息,可以参阅:[OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/) +如果想了解更多信息,可以参阅:[OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/index.html) + +## 🔃 更新日志 + +#### 🔥 **2023.10.22 :更新OpenVINO™ C# API ** + +- 🗳 **OpenVINO™ C# API 库:** + - 修改OpenVINO™ C# API 中的错误,并对代码板块进行整合,添加异常处理机制。 +- 🛹**应用案例:** + - 行人摔倒检测 — 基于 OpenVINO C# API 部署PP-Human + - 基于 OpenVINO 部署 RT-DETR +- 🔮 **NuGet包:** + - 废除之前发布的NuGet包,发布更新新的安装包,发布三类NuGet包,包括**OpenVINO.CSharp.API**:核心代码包,**OpenVINO.CSharp.Windows**:Windows平台整合包、**OpenVINO.runtime.win**:Windows平台运行库包。 + +#### **2023.6.19 :发布 OpenVINO™ C# API 3.0** + +- 🗳 **OpenVINO™ C# API 库:** + - 升级OpenVINO™ C# API 2.0 到 OpenVINO™ C# API 3.0 版本,由原来的重构 C++ API 改为直接读取 OpenVINO™ 官方 C API,使得应用更加灵活,所支持的功能更加丰富。 +- 🛹**应用案例:** + - OpenVINO™ C# API部署Yolov8模型实例。 +- 🔮 **NuGet包:** + - 制作并发布NuGet包,发布**OpenVINO™ C# API.win 3.0.120** ,包含OpenVINO 2023.0 依赖项。 + + + +## 🎖 贡献 + +  如果您对OpenVINO™ 在C#使用感兴趣,有兴趣对开源社区做出自己的贡献,欢迎加入我们,一起开发OpenVINO™ C# API。 + +  如果你对该项目有一些想法或改进思路,欢迎联系我们,指导下我们的工作。 +## 许可证书 +本项目的发布受[Apache 2.0 license](LICENSE)许可认证。 From f1cea9f179c019a94adaaf9f78d3d4b617501178 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 11:25:15 +0800 Subject: [PATCH 09/40] Add the IDisposable interface to the Input class. --- modules/csharp_api/csharp/core/node_input.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/csharp_api/csharp/core/node_input.cs b/modules/csharp_api/csharp/core/node_input.cs index 73beb7c3e..aaf21b447 100644 --- a/modules/csharp_api/csharp/core/node_input.cs +++ b/modules/csharp_api/csharp/core/node_input.cs @@ -9,7 +9,7 @@ namespace OpenVinoSharp ///

/// A handle for one of a node's inputs. /// - public class Input + public class Input : IDisposable { private Node m_node; private ulong m_index = 0; @@ -27,12 +27,12 @@ public Input(Node node, ulong index) /// Default deconstruction. ///
~Input() { - dispose(); + Dispose(); } /// /// Release unmanaged resources. /// - public void dispose() { + public void Dispose() { m_node.Dispose(); } /// From 2b1c7e2747a81080bad516247a970189404fdc3b Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 14:18:46 +0800 Subject: [PATCH 10/40] Clean up references to OpenCvSharp in the code and reduce its reliance on the three method libraries. --- modules/csharp_api/csharp/model/Yolov8.cs | 665 ------------------ modules/csharp_api/csharp/preprocess/OvMat.cs | 63 -- 2 files changed, 728 deletions(-) delete mode 100644 modules/csharp_api/csharp/model/Yolov8.cs delete mode 100644 modules/csharp_api/csharp/preprocess/OvMat.cs diff --git a/modules/csharp_api/csharp/model/Yolov8.cs b/modules/csharp_api/csharp/model/Yolov8.cs deleted file mode 100644 index 9cefd895b..000000000 --- a/modules/csharp_api/csharp/model/Yolov8.cs +++ /dev/null @@ -1,665 +0,0 @@ -using OpenCvSharp; -using OpenCvSharp.Dnn; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - - -namespace OpenVinoSharp.model.Yolov8 -{ - /// - /// Key point data - /// - public class Result - { - /// - /// Get Result Length - /// - public int length - { - get - { - return scores.Count; - } - } - - /// - /// Identification result class - /// - public List classes = new List(); - /// - /// Confidence value - /// - public List scores = new List(); - /// - /// Prediction box - /// - public List rects = new List(); - /// - /// Split Region - /// - public List masks = new List(); - /// - /// Key points of the human body - /// - public List poses = new List(); - - /// - /// object detection - /// - /// Predictiveness scores - /// Identification box - /// Identification class - public void add(float score, Rect rect, int cla) - { - scores.Add(score); - rects.Add(rect); - classes.Add(cla); - } - /// - /// 物体分割 - /// - /// Predictiveness scores - /// Identification box - /// Identification class - /// Semantic segmentation results - public void add(float score, Rect rect, int cla, Mat mask) - { - scores.Add(score); - rects.Add(rect); - classes.Add(cla); - masks.Add(mask); - } - /// - /// Key point prediction - /// - /// Predictiveness scores - /// Identification box - /// Key point data - public void add(float score, Rect rect, PoseData pose) - { - scores.Add(score); - rects.Add(rect); - poses.Add(pose); - } - } - /// - /// Human Key Point Data - /// - public struct PoseData - { - /// - /// Key point prediction score - /// - public float[] score; - /// - /// Key point prediction results. - /// - public List point; - /// - /// Default Constructor - /// - /// Key point prediction results. - /// Image scaling ratio. - public PoseData(float[] data, float[] scales) - { - score = new float[data.Length]; - point = new List(); - for (int i = 0; i < 17; i++) - { - Point p = new Point((int)(data[3 * i] * scales[0]), (int)(data[3 * i + 1] * scales[1])); - this.point.Add(p); - this.score[i] = data[3 * i + 2]; - } - } - /// - /// Convert PoseData to string. - /// - /// PoseData string. 
- public string to_string() - { - string[] point_str = new string[] { "Nose", "Left Eye", "Right Eye", "Left Ear", "Right Ear", - "Left Shoulder", "Right Shoulder", "Left Elbow", "Right Elbow", "Left Wrist", "Right Wrist", - "Left Hip", "Right Hip", "Left Knee", "Right Knee", "Left Ankle", "Right Ankle" }; - string ss = ""; - for (int i = 0; i < point.Count; i++) - { - ss += point_str[i] + ": (" + point[i].X.ToString("0") + " ," + point[i].Y.ToString("0") + " ," + score[i].ToString("0.00") + ") "; - } - return ss; - } - } - - /// - /// Yolov8 model inference result processing method. - /// - public class ResultProcess - { - /// - /// Identify Result Types - /// - public string[] class_names; - /// - /// Image information scaling ratio h, scaling ratio h, height, width - /// - public float[] scales; - /// - /// Confidence threshold - /// - public float score_threshold; - /// - /// Non maximum suppression threshold - /// - public float nms_threshold; - /// - /// Number of categories - /// - public int categ_nums = 0; - - - /// - /// SegmentationResult processing class construction - /// - /// scaling ratio h, scaling ratio h, height, width - /// score threshold - /// nms threshold - public ResultProcess(float[] scales, int categ_nums, float score_threshold = 0.3f, float nms_threshold = 0.5f) - { - this.scales = scales; - this.score_threshold = score_threshold; - this.nms_threshold = nms_threshold; - this.categ_nums = categ_nums; - } - - /// - /// Read local recognition result type file to memory - /// - /// file path - /// - /// Only the. txt file format is supported, and the content format for this category is as follows: - /// sea lion - /// Scottish deerhound - /// tiger cat - /// ··· - /// - public void read_class_names(string path) - { - - List str = new List(); - StreamReader sr = new StreamReader(path); - string line; - while ((line = sr.ReadLine()) != null) - { - str.Add(line); - } - - class_names = str.ToArray(); - } - - /// - /// Result process - /// - /// Model prediction output - /// Model recognition results - public KeyValuePair[] process_cls_result(float[] result) - { - List new_list = new List { }; - for (int i = 0; i < result.Length; i++) - { - new_list.Add(new float[] { (float)result[i], i }); - } - new_list.Sort((a, b) => b[0].CompareTo(a[0])); - - KeyValuePair[] cls = new KeyValuePair[10]; - for (int i = 0; i < 10; ++i) - { - cls[i] = new KeyValuePair((int)new_list[i][1], new_list[i][0]); - } - return cls; - } - - /// - /// Result drawing - /// - /// recognition result - /// source image - /// result image - public Mat draw_cls_result(KeyValuePair result, Mat image) - { - Cv2.PutText(image, class_names[result.Key] + ": " + result.Value.ToString("0.00"), - new Point(25, 30), HersheyFonts.HersheySimplex, 1, new Scalar(0, 0, 255), 2); - return image; - } - - - - /// - /// Result process - /// - /// Model prediction output - /// Model recognition results - public Result process_det_result(float[] result) - { - Mat result_data = new Mat(4 + categ_nums, 8400, MatType.CV_32F, result); - result_data = result_data.T(); - - // Storage results list - List position_boxes = new List(); - List class_ids = new List(); - List confidences = new List(); - // Preprocessing output results - for (int i = 0; i < result_data.Rows; i++) - { - Mat classes_scores = result_data.Row(i).ColRange(4, 4 + categ_nums);//GetArray(i, 5, classes_scores); - Point max_classId_point, min_classId_point; - double max_score, min_score; - // Obtain the maximum value and its position in a set of data - 
Cv2.MinMaxLoc(classes_scores, out min_score, out max_score, - out min_classId_point, out max_classId_point); - // Confidence level between 0 ~ 1 - // Obtain identification box information - if (max_score > 0.25) - { - float cx = result_data.At(i, 0); - float cy = result_data.At(i, 1); - float ow = result_data.At(i, 2); - float oh = result_data.At(i, 3); - int x = (int)((cx - 0.5 * ow) * this.scales[0]); - int y = (int)((cy - 0.5 * oh) * this.scales[1]); - int width = (int)(ow * this.scales[0]); - int height = (int)(oh * this.scales[1]); - Rect box = new Rect(); - box.X = x; - box.Y = y; - box.Width = width; - box.Height = height; - - position_boxes.Add(box); - class_ids.Add(max_classId_point.X); - confidences.Add((float)max_score); - } - } - - // NMS non maximum suppression - int[] indexes = new int[position_boxes.Count]; - CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); - - Result re_result = new Result(); - // - for (int i = 0; i < indexes.Length; i++) - { - int index = indexes[i]; - re_result.add(confidences[index], position_boxes[index], class_ids[index]); - } - return re_result; - } - - /// - /// Result drawing - /// - /// recognition result - /// image - /// - public Mat draw_det_result(Result result, Mat image) - { - - // Draw recognition results on the image - for (int i = 0; i < result.length; i++) - { - //Console.WriteLine(result.rects[i]); - Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); - Cv2.Rectangle(image, new Point(result.rects[i].TopLeft.X, result.rects[i].TopLeft.Y + 30), - new Point(result.rects[i].BottomRight.X, result.rects[i].TopLeft.Y), new Scalar(0, 255, 255), -1); - Cv2.PutText(image, class_names[ result.classes[i]] + "-" + result.scores[i].ToString("0.00"), - new Point(result.rects[i].X, result.rects[i].Y + 25), - HersheyFonts.HersheySimplex, 0.8, new Scalar(0, 0, 0), 2); - } - return image; - } - - /// - /// sigmoid - /// - /// - /// - private float sigmoid(float a) - { - float b = 1.0f / (1.0f + (float)Math.Exp(-a)); - return b; - } - - /// - /// Result process - /// - /// detection output - /// segmentation output - /// - public Result process_seg_result(float[] detect, float[] proto) - { - Mat detect_data = new Mat(36 + categ_nums, 8400, MatType.CV_32F, detect); - Mat proto_data = new Mat(32, 25600, MatType.CV_32F, proto); - detect_data = detect_data.T(); - List position_boxes = new List(); - List class_ids = new List(); - List confidences = new List(); - List masks = new List(); - for (int i = 0; i < detect_data.Rows; i++) - { - - Mat classes_scores = detect_data.Row(i).ColRange(4, 4 + categ_nums);//GetArray(i, 5, classes_scores); - Point max_classId_point, min_classId_point; - double max_score, min_score; - Cv2.MinMaxLoc(classes_scores, out min_score, out max_score, - out min_classId_point, out max_classId_point); - - if (max_score > 0.25) - { - //Console.WriteLine(max_score); - - Mat mask = detect_data.Row(i).ColRange(4 + categ_nums, categ_nums + 36); - - float cx = detect_data.At(i, 0); - float cy = detect_data.At(i, 1); - float ow = detect_data.At(i, 2); - float oh = detect_data.At(i, 3); - int x = (int)((cx - 0.5 * ow) * this.scales[0]); - int y = (int)((cy - 0.5 * oh) * this.scales[1]); - int width = (int)(ow * this.scales[0]); - int height = (int)(oh * this.scales[1]); - Rect box = new Rect(); - box.X = x; - box.Y = y; - box.Width = width; - box.Height = height; - - position_boxes.Add(box); - class_ids.Add(max_classId_point.X); - confidences.Add((float)max_score); - 
masks.Add(mask); - } - } - - - int[] indexes = new int[position_boxes.Count]; - CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); - - Result re_result = new Result(); // Output Result Class - // RGB images with colors - Mat rgb_mask = Mat.Zeros(new Size((int)scales[3], (int)scales[2]), MatType.CV_8UC3); - Random rd = new Random(); // Generate Random Numbers - for (int i = 0; i < indexes.Length; i++) - { - int index = indexes[i]; - // Division scope - Rect box = position_boxes[index]; - int box_x1 = Math.Max(0, box.X); - int box_y1 = Math.Max(0, box.Y); - int box_x2 = Math.Max(0, box.BottomRight.X); - int box_y2 = Math.Max(0, box.BottomRight.Y); - - // Segmentation results - Mat original_mask = masks[index] * proto_data; - for (int col = 0; col < original_mask.Cols; col++) - { - original_mask.At(0, col) = sigmoid(original_mask.At(0, col)); - } - // 1x25600 -> 160x160 Convert to original size - Mat reshape_mask = original_mask.Reshape(1, 160); - - //Console.WriteLine("m1.size = {0}", m1.Size()); - - // Split size after scaling - int mx1 = Math.Max(0, (int)((box_x1 / scales[0]) * 0.25)); - int mx2 = Math.Max(0, (int)((box_x2 / scales[0]) * 0.25)); - int my1 = Math.Max(0, (int)((box_y1 / scales[1]) * 0.25)); - int my2 = Math.Max(0, (int)((box_y2 / scales[1]) * 0.25)); - // Crop Split Region - Mat mask_roi = new Mat(reshape_mask, new OpenCvSharp.Range(my1, my2), new OpenCvSharp.Range(mx1, mx2)); - // Convert the segmented area to the actual size of the image - Mat actual_maskm = new Mat(); - Cv2.Resize(mask_roi, actual_maskm, new Size(box_x2 - box_x1, box_y2 - box_y1)); - // Binary segmentation region - for (int r = 0; r < actual_maskm.Rows; r++) - { - for (int c = 0; c < actual_maskm.Cols; c++) - { - float pv = actual_maskm.At(r, c); - if (pv > 0.5) - { - actual_maskm.At(r, c) = 1.0f; - } - else - { - actual_maskm.At(r, c) = 0.0f; - } - } - } - - // 预测 - Mat bin_mask = new Mat(); - actual_maskm = actual_maskm * 200; - actual_maskm.ConvertTo(bin_mask, MatType.CV_8UC1); - if ((box_y1 + bin_mask.Rows) >= scales[2]) - { - box_y2 = (int)scales[2] - 1; - } - if ((box_x1 + bin_mask.Cols) >= scales[3]) - { - box_x2 = (int)scales[3] - 1; - } - // Obtain segmentation area - Mat mask = Mat.Zeros(new Size((int)scales[3], (int)scales[2]), MatType.CV_8UC1); - bin_mask = new Mat(bin_mask, new OpenCvSharp.Range(0, box_y2 - box_y1), new OpenCvSharp.Range(0, box_x2 - box_x1)); - Rect roi = new Rect(box_x1, box_y1, box_x2 - box_x1, box_y2 - box_y1); - bin_mask.CopyTo(new Mat(mask, roi)); - // Color segmentation area - Cv2.Add(rgb_mask, new Scalar(rd.Next(0, 255), rd.Next(0, 255), rd.Next(0, 255)), rgb_mask, mask); - - re_result.add(confidences[index], position_boxes[index], class_ids[index], rgb_mask.Clone()); - - } - - return re_result; - } - - /// - /// Result drawing - /// - /// recognition result - /// image - /// - public Mat draw_seg_result(Result result, Mat image) - { - Mat masked_img = new Mat(); - // Draw recognition results on the image - for (int i = 0; i < result.length; i++) - { - Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); - Cv2.Rectangle(image, new Point(result.rects[i].TopLeft.X, result.rects[i].TopLeft.Y + 30), - new Point(result.rects[i].BottomRight.X, result.rects[i].TopLeft.Y), new Scalar(0, 255, 255), -1); - Cv2.PutText(image, class_names[result.classes[i]] + "-" + result.scores[i].ToString("0.00"), - new Point(result.rects[i].X, result.rects[i].Y + 25), - HersheyFonts.HersheySimplex, 0.8, new Scalar(0, 
0, 0), 2); - Cv2.AddWeighted(image, 0.5, result.masks[i], 0.5, 0, masked_img); - } - return masked_img; - } - - /// - /// Result process - /// - /// Model prediction output - /// Model recognition results - public Result process_pose_result(float[] result) - { - Mat result_data = new Mat(56, 8400, MatType.CV_32F, result); - result_data = result_data.T(); - List position_boxes = new List(); - List confidences = new List(); - List pose_datas = new List(); - for (int i = 0; i < result_data.Rows; i++) - { - if (result_data.At(i, 4) > 0.25) - { - //Console.WriteLine(max_score); - float cx = result_data.At(i, 0); - float cy = result_data.At(i, 1); - float ow = result_data.At(i, 2); - float oh = result_data.At(i, 3); - int x = (int)((cx - 0.5 * ow) * this.scales[0]); - int y = (int)((cy - 0.5 * oh) * this.scales[1]); - int width = (int)(ow * this.scales[0]); - int height = (int)(oh * this.scales[1]); - Rect box = new Rect(); - box.X = x; - box.Y = y; - box.Width = width; - box.Height = height; - Mat pose_mat = result_data.Row(i).ColRange(5, 56); - float[] pose_data = new float[51]; - pose_mat.GetArray(out pose_data); - PoseData pose = new PoseData(pose_data, this.scales); - - position_boxes.Add(box); - - confidences.Add((float)result_data.At(i, 4)); - pose_datas.Add(pose); - } - } - - int[] indexes = new int[position_boxes.Count]; - CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes); - - Result re_result = new Result(); - for (int i = 0; i < indexes.Length; i++) - { - int index = indexes[i]; - re_result.add(confidences[index], position_boxes[index], pose_datas[index]); - //Console.WriteLine("rect: {0}, score: {1}", position_boxes[index], confidences[index]); - } - return re_result; - - } - /// - /// Result drawing - /// - /// recognition result - /// image - /// - public Mat draw_pose_result(Result result, Mat image, double visual_thresh) - { - - // 将识别结果绘制到图片上 - for (int i = 0; i < result.length; i++) - { - Cv2.Rectangle(image, result.rects[i], new Scalar(0, 0, 255), 2, LineTypes.Link8); - - draw_poses(result.poses[i], ref image, visual_thresh); - } - return image; - } - /// - /// Key point result drawing - /// - /// Key point data - /// image - public void draw_poses(PoseData pose, ref Mat image, double visual_thresh) - { - // Connection point relationship - int[,] edgs = new int[17, 2] { { 0, 1 }, { 0, 2}, {1, 3}, {2, 4}, {3, 5}, {4, 6}, {5, 7}, {6, 8}, - {7, 9}, {8, 10}, {5, 11}, {6, 12}, {11, 13}, {12, 14},{13, 15 }, {14, 16 }, {11, 12 } }; - // Color Library - Scalar[] colors = new Scalar[18] { new Scalar(255, 0, 0), new Scalar(255, 85, 0), new Scalar(255, 170, 0), - new Scalar(255, 255, 0), new Scalar(170, 255, 0), new Scalar(85, 255, 0), new Scalar(0, 255, 0), - new Scalar(0, 255, 85), new Scalar(0, 255, 170), new Scalar(0, 255, 255), new Scalar(0, 170, 255), - new Scalar(0, 85, 255), new Scalar(0, 0, 255), new Scalar(85, 0, 255), new Scalar(170, 0, 255), - new Scalar(255, 0, 255), new Scalar(255, 0, 170), new Scalar(255, 0, 85) }; - // Draw Keys - for (int p = 0; p < 17; p++) - { - if (pose.score[p] < visual_thresh) - { - continue; - } - - Cv2.Circle(image, pose.point[p], 2, colors[p], -1); - //Console.WriteLine(pose.point[p]); - } - // draw - for (int p = 0; p < 17; p++) - { - if (pose.score[edgs[p, 0]] < visual_thresh || pose.score[edgs[p, 1]] < visual_thresh) - { - continue; - } - - float[] point_x = new float[] { pose.point[edgs[p, 0]].X, pose.point[edgs[p, 1]].X }; - float[] point_y = new float[] { pose.point[edgs[p, 0]].Y, 
pose.point[edgs[p, 1]].Y }; - - Point center_point = new Point((int)((point_x[0] + point_x[1]) / 2), (int)((point_y[0] + point_y[1]) / 2)); - double length = Math.Sqrt(Math.Pow((double)(point_x[0] - point_x[1]), 2.0) + Math.Pow((double)(point_y[0] - point_y[1]), 2.0)); - int stick_width = 2; - Size axis = new Size(length / 2, stick_width); - double angle = (Math.Atan2((double)(point_y[0] - point_y[1]), (double)(point_x[0] - point_x[1]))) * 180 / Math.PI; - Point[] polygon = Cv2.Ellipse2Poly(center_point, axis, (int)angle, 0, 360, 1); - Cv2.FillConvexPoly(image, polygon, colors[p]); - - } - } - /// - /// Print and output image classification results - /// - /// classification results - public void print_result(KeyValuePair[] result) - { - Console.WriteLine("\n Classification Top 10 result : \n"); - Console.WriteLine("classid probability"); - Console.WriteLine("------- -----------"); - for (int i = 0; i < 10; ++i) - { - Console.WriteLine("{0} {1}", result[i].Key.ToString("0"), result[i].Value.ToString("0.000000")); - } - } - /// - /// Print out image prediction results - /// - /// prediction results - public void print_result(Result result) - { - if (result.poses.Count != 0) - { - Console.WriteLine("\n Classification result : \n"); - for (int i = 0; i < result.length; ++i) - { - string ss = (i + 1).ToString() + ": 1 " + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString() - +" " + result.poses[i].to_string(); - Console.WriteLine(ss); - } - return; - } - - if (result.masks.Count != 0) - { - Console.WriteLine("\n Segmentation result : \n"); - for (int i = 0; i < result.length; ++i) - { - string ss = (i + 1).ToString() + ": " + result.classes[i]+ "\t" + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString(); - Console.WriteLine(ss); - } - return; - } - Console.WriteLine("\n Detection result : \n"); - for (int i = 0; i < result.length; ++i) - { - string ss = (i + 1).ToString() + ": " + result.classes[i] + "\t" + result.scores[i].ToString("0.00") + " " + result.rects[i].ToString(); - Console.WriteLine(ss); - } - - } - - }; - -} diff --git a/modules/csharp_api/csharp/preprocess/OvMat.cs b/modules/csharp_api/csharp/preprocess/OvMat.cs deleted file mode 100644 index fb63cfbaa..000000000 --- a/modules/csharp_api/csharp/preprocess/OvMat.cs +++ /dev/null @@ -1,63 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Drawing; -using System.Drawing.Imaging; -using System.IO; -using System.Linq; -using System.Runtime.InteropServices; -using System.Text; -using System.Threading.Tasks; - -namespace OpenVinoSharp.preprocess -{ - public class OvMat - { - public byte[] mat_data { get; set; } - public ulong mat_data_size { get; set; } - public int mat_width { get; set; } - public int mat_height { get; set; } - public int mat_channels { get; set; } - public ElementType mat_type { get; set; } = ElementType.U8; - - public OvMat() { } - public OvMat(byte[] mat_data, ulong mat_data_size, int mat_width, int mat_height, int mat_channels, ElementType mat_type) - { - this.mat_data = mat_data; - this.mat_data_size = mat_data_size; - this.mat_width = mat_width; - this.mat_height = mat_height; - this.mat_channels = mat_channels; - this.mat_type = mat_type; - } - public OvMat(string image_path) - { - Bitmap img = new Bitmap(image_path); - Rectangle rect = new Rectangle(0, 0, img.Width, img.Height); - BitmapData bit = img.LockBits(rect, ImageLockMode.ReadWrite, img.PixelFormat); - byte[] byte_data = new byte[bit.Width * bit.Height * 3]; - Marshal.Copy(bit.Scan0, byte_data, 0, 
byte_data.Length); - this.mat_data = byte_data; - this.mat_data_size = (ulong)(img.Height * img.Width * 3); - this.mat_width = img.Width; - this.mat_height = img.Height; - this.mat_channels = 3; - this.mat_type = ElementType.U8; - img.Dispose(); - - } - public static OvMat read(string image_path) - { - Bitmap img = new Bitmap(image_path); - Rectangle rect = new Rectangle(0, 0, img.Width, img.Height); - BitmapData bit = img.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb); - img.UnlockBits(bit); - byte[] byte_data = new byte[bit.Width * bit.Height * 3]; - Marshal.Copy(bit.Scan0, byte_data, 0, byte_data.Length); - OvMat mat = new OvMat(byte_data, (ulong)(img.Height * img.Width * 3), img.Width, img.Height, 3, ElementType.U8); - - img.Dispose(); - return mat; - //return new OvMat(image_path); - } - } -} From 479db9c31de4e03863eb49d2b3aedfd4bc30d663 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 14:20:26 +0800 Subject: [PATCH 11/40] Modify the new NuGet Package generation settings. --- modules/csharp_api/csharp/CSharpAPI.csproj | 33 ++++--------------- .../csharp/build/OpenVINO.CSharp.win.targets | 10 ------ .../csharp/build/openvino2023.1/plugins.xml | 6 ---- 3 files changed, 6 insertions(+), 43 deletions(-) delete mode 100644 modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets delete mode 100644 modules/csharp_api/csharp/build/openvino2023.1/plugins.xml diff --git a/modules/csharp_api/csharp/CSharpAPI.csproj b/modules/csharp_api/csharp/CSharpAPI.csproj index 1ea258945..6817c433a 100644 --- a/modules/csharp_api/csharp/CSharpAPI.csproj +++ b/modules/csharp_api/csharp/CSharpAPI.csproj @@ -1,13 +1,13 @@  - net5.0;net6.0;net48 + net5.0;net6.0;net48;netcoreapp3.1 True True - OpenVINO.CSharp.win + OpenVINO.CSharp.API OpenVINO C# API - 3.1.1 + 2023.1.0.1 Guojin Yan Guojin Yan OpenVINO C# API @@ -18,19 +18,15 @@ git ../../nuget zh + + This version is a pre release version of OpenVINO™ C# API 3.0 and its features are not yet fully developed. If there are any issues during use, please feel free to contact me. OpenVinoSharp - OpenVINOCSharp + OpenVINO_CSharp_API - - - true - build\openvino2023.1\%(Filename)%(Extension) - - @@ -42,23 +38,6 @@ - - - - - - - - - true - build\ - - - - - - - $(DefineConstants);DOTNET_FRAMEWORK; diff --git a/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets b/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets deleted file mode 100644 index f8389ae7e..000000000 --- a/modules/csharp_api/csharp/build/OpenVINO.CSharp.win.targets +++ /dev/null @@ -1,10 +0,0 @@ - - - - - %(RecursiverDir)openvino2023.1/%(Filename)%(Extension) - PreserveNewest - - - - \ No newline at end of file diff --git a/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml b/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml deleted file mode 100644 index 1c833ca03..000000000 --- a/modules/csharp_api/csharp/build/openvino2023.1/plugins.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file From 4ffc6956c14ea21a08ca8eb565700257ebb31dff Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 14:21:37 +0800 Subject: [PATCH 12/40] Update README.md. 
--- modules/csharp_api/README.md | 12 ++++++------ modules/csharp_api/README_cn.md | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md index 270ea2e4b..9d42a0fb0 100644 --- a/modules/csharp_api/README.md +++ b/modules/csharp_api/README.md @@ -52,20 +52,20 @@ The following article provides installation methods for OpenVINO™ C# API on di ## 🏷How to use OpenVINO™ C# API? - **Quick start** - - [Deploying the Yolov8 full series model using OpenVINO™ C# API](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/demos/yolov8/README.md) + - [Deploying the Yolov8 full series model using OpenVINO™ C# API](demos/yolov8/README.md) - **Simple usage** If you don't know how to use it, simply understand the usage method through the following code. ```c# -using OpenVINO™ C# API; -namespace test +using OpenVinoSharp; +namespace test { internal class Program { static void Main(string[] args) { - using Core core = new Core(); + using Core core = new Core(); using Model model = core.read_model("./model.xml"); using CompiledModel compiled_model = core.compiled_model(model, "AUTO"); using InferRequest infer_request = compiled_model.create_infer_request(); @@ -81,8 +81,8 @@ The classes and objects encapsulated in the project, such as Core, Model, Tensor ## 💻 Tutorial Examples -- [Using OpenVINO™ C# API to Deploy the Yolov8 Model on the AIxBoard](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/AlxBoard_deploy_yolov8/README.md) -- [Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/PP-Human_Fall_Detection/README.md) +- [Using OpenVINO™ C# API to Deploy the Yolov8 Model on the AIxBoard](tutorial_examples/AlxBoard_deploy_yolov8/README.md) +- [Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API](tutorial_examples\PP-Human_Fall_Detection\README.md) - [Deploying RT-DETR based on OpenVINO](https://github.com/guojin-yan/RT-DETR-OpenVINO) ## 🗂 API Reference diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md index 4c2297238..568d27cc8 100644 --- a/modules/csharp_api/README_cn.md +++ b/modules/csharp_api/README_cn.md @@ -54,7 +54,7 @@ - **快速体验** - [使用OpenVINO™ C# API部署Yolov8全系列模型](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/demos/yolov8/README_cn.md) + [使用OpenVINO™ C# API部署Yolov8全系列模型](demos/yolov8/README_cn.md) - **使用方法** @@ -62,13 +62,13 @@ ```c# using OpenVinoSharp; // 引用命名空间 -namespace test +namespace test { internal class Program { static void Main(string[] args) { - using Core core = new Core(); // 初始化 Core 核心 + using Core core = new Core(); // 初始化 Core 核心 using Model model = core.read_model("./model.xml"); // 读取模型文件 using CompiledModel compiled_model = core.compiled_model(model, "AUTO"); // 将模型加载到设备 using InferRequest infer_request = compiled_model.create_infer_request(); // 创建推理通道 @@ -84,8 +84,8 @@ namespace test ## 💻 应用案例 -- [爱克斯开发板使用OpenVINO™ C# API部署Yolov8模型](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/AlxBoard_deploy_yolov8/README_cn.md) -- [行人摔倒检测 — 基于 OpenVINO C# API 部署PP-Human](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/tutorial_examples/PP-Human_Fall_Detection/README_cn.md) +- [爱克斯开发板使用OpenVINO™ C# API部署Yolov8模型](tutorial_examples/AlxBoard_deploy_yolov8/README_cn.md) +- [行人摔倒检测 — 基于 OpenVINO C# API 
部署PP-Human](tutorial_examples\PP-Human_Fall_Detection\README_cn.md) - [基于 OpenVINO 部署 RT-DETR](https://github.com/guojin-yan/RT-DETR-OpenVINO) ## 🗂 API 文档 From 9cae894ebd71fe886570afd0828e900fb033aa96 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 14:23:17 +0800 Subject: [PATCH 13/40] Modify the details in the code. --- modules/csharp_api/csharp/common/version.cs | 5 ++++- modules/csharp_api/csharp/core/compiled_model.cs | 6 +++++- modules/csharp_api/csharp/core/core.cs | 9 +++++---- modules/csharp_api/csharp/core/infer_request.cs | 4 +--- modules/csharp_api/csharp/core/layout.cs | 6 +++++- modules/csharp_api/csharp/core/model.cs | 9 +++++---- modules/csharp_api/csharp/core/node_input.cs | 9 ++++++++- modules/csharp_api/csharp/core/node_output.cs | 7 ++++++- 8 files changed, 39 insertions(+), 16 deletions(-) diff --git a/modules/csharp_api/csharp/common/version.cs b/modules/csharp_api/csharp/common/version.cs index 056c6a95a..9414fee71 100644 --- a/modules/csharp_api/csharp/common/version.cs +++ b/modules/csharp_api/csharp/common/version.cs @@ -8,7 +8,7 @@ namespace OpenVinoSharp { /// - /// [struct] Represents version information that describes plugins and the OpemVINO library + /// [struct] Represents version information that describes plugins and the OpenVINO library /// /// ov_runtime_c#_api public struct Version @@ -55,6 +55,9 @@ public struct CoreVersion /// A device name /// public string device_name; + /// + /// The OpenVINO version. + /// public Version version; } /// diff --git a/modules/csharp_api/csharp/core/compiled_model.cs b/modules/csharp_api/csharp/core/compiled_model.cs index da28319da..43bf5bc81 100644 --- a/modules/csharp_api/csharp/core/compiled_model.cs +++ b/modules/csharp_api/csharp/core/compiled_model.cs @@ -24,7 +24,11 @@ public class CompiledModel : IDisposable /// /// [private]CompiledModel class pointer. /// - public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + public IntPtr Ptr + { + get { return m_ptr; } + set { m_ptr = value; } + } /// /// Constructs CompiledModel from the initialized ptr. diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 0c2995217..5ee01ba65 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -2,10 +2,8 @@ using System.Collections.Generic; using System.Runtime.InteropServices; - namespace OpenVinoSharp { - /// /// This class represents an OpenVINO runtime Core entity. /// ov_runtime_c#_api @@ -23,7 +21,11 @@ public class Core : IDisposable /// /// [public]Core class pointer. /// - public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + public IntPtr Ptr + { + get { return m_ptr; } + set { m_ptr = value; } + } /// /// Represent all available devices. 
@@ -316,7 +318,6 @@ public List get_available_devices() NativeMethods.ov_available_devices_free(devices_ptr); return devices; } - } } diff --git a/modules/csharp_api/csharp/core/infer_request.cs b/modules/csharp_api/csharp/core/infer_request.cs index 31ad5d05d..9ad8849b2 100644 --- a/modules/csharp_api/csharp/core/infer_request.cs +++ b/modules/csharp_api/csharp/core/infer_request.cs @@ -51,7 +51,6 @@ public void Dispose() return; } NativeMethods.ov_infer_request_free(m_ptr); - m_ptr = IntPtr.Zero; } /// @@ -298,8 +297,7 @@ public Tensor get_output_tensor() IntPtr tensor_ptr = IntPtr.Zero; HandleException.handler( - NativeMethods.ov_infer_request_get_output_tensor( - m_ptr, ref tensor_ptr)); + NativeMethods.ov_infer_request_get_output_tensor(m_ptr, ref tensor_ptr)); return new Tensor(tensor_ptr); } /// diff --git a/modules/csharp_api/csharp/core/layout.cs b/modules/csharp_api/csharp/core/layout.cs index 02b305065..f05a28fb9 100644 --- a/modules/csharp_api/csharp/core/layout.cs +++ b/modules/csharp_api/csharp/core/layout.cs @@ -34,7 +34,11 @@ public class Layout : IDisposable /// /// [public]Layout class pointer. /// - public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + public IntPtr Ptr + { + get { return m_ptr; } + set { m_ptr = value; } + } /// /// Constructs a Layout with static or dynamic layout information based on string representation. diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs index f9266baf0..46a548800 100644 --- a/modules/csharp_api/csharp/core/model.cs +++ b/modules/csharp_api/csharp/core/model.cs @@ -21,7 +21,11 @@ public class Model : IDisposable /// /// [public]Model class pointer. /// - public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + public IntPtr Ptr + { + get { return m_ptr; } + set { m_ptr = value; } + } /// /// Default Constructor /// @@ -52,7 +56,6 @@ public void Dispose() return; } NativeMethods.ov_core_free(m_ptr); - m_ptr = IntPtr.Zero; } @@ -67,7 +70,6 @@ public string get_friendly_name() HandleException.handler( NativeMethods.ov_model_get_friendly_name(m_ptr, ref s_ptr)); string ss = Marshal.PtrToStringAnsi(s_ptr); - return ss; } /// @@ -143,7 +145,6 @@ public Node get_output(ulong index) IntPtr port_ptr = IntPtr.Zero; HandleException.handler( NativeMethods.ov_model_output_by_index(m_ptr, index, ref port_ptr)); - return new Node(port_ptr, Node.NodeType.e_nomal); } /// diff --git a/modules/csharp_api/csharp/core/node_input.cs b/modules/csharp_api/csharp/core/node_input.cs index aaf21b447..1700ea470 100644 --- a/modules/csharp_api/csharp/core/node_input.cs +++ b/modules/csharp_api/csharp/core/node_input.cs @@ -11,7 +11,13 @@ namespace OpenVinoSharp /// public class Input : IDisposable { + /// + /// The input node. + /// private Node m_node; + /// + /// The input node port index. + /// private ulong m_index = 0; /// /// Constructs a Output. @@ -32,7 +38,8 @@ public Input(Node node, ulong index) /// /// Release unmanaged resources. /// - public void Dispose() { + public void Dispose() + { m_node.Dispose(); } /// diff --git a/modules/csharp_api/csharp/core/node_output.cs b/modules/csharp_api/csharp/core/node_output.cs index 749814fca..b19251174 100644 --- a/modules/csharp_api/csharp/core/node_output.cs +++ b/modules/csharp_api/csharp/core/node_output.cs @@ -9,10 +9,15 @@ namespace OpenVinoSharp /// /// A handle for one of a node's outputs. /// - /// public class Output : IDisposable { + /// + /// The output node. + /// private Node m_node; + /// + /// The output node port index. 
+ /// private ulong m_index = 0; /// /// Constructs a Output. From a5f7a52866585969779cc6b5f1240ce0b2d605cc Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 14:31:26 +0800 Subject: [PATCH 14/40] Update README.md. --- modules/csharp_api/README.md | 3 +-- modules/csharp_api/README_cn.md | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md index 9d42a0fb0..f205e544f 100644 --- a/modules/csharp_api/README.md +++ b/modules/csharp_api/README.md @@ -37,7 +37,6 @@ | Package | Description | Link | | ------------------------ | --------------------------- | ------------------------------------------------------------ | | **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | -| | | | @@ -117,4 +116,4 @@ If you want to learn more information, you can refer to: [OpenVINO™ C# API API ## License -The release of this project is certified under the [Apache 2.0 license](https://github.com/guojin-yan/OpenVINO™ C# API/blob/OpenVINO™ C# API3.0/LICENSE) . +The release of this project is certified under the [Apache 2.0 license](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/LICENSE.txt) . diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md index 568d27cc8..4d3150714 100644 --- a/modules/csharp_api/README_cn.md +++ b/modules/csharp_api/README_cn.md @@ -38,7 +38,6 @@ | Package | Description | Link | | ------------------------ | --------------------------- | ------------------------------------------------------------ | | **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | -| | | | @@ -123,5 +122,5 @@ namespace test ## 许可证书 -本项目的发布受[Apache 2.0 license](LICENSE)许可认证。 +本项目的发布受[Apache 2.0 license](https://github.com/guojin-yan/OpenVINO-CSharp-API/blob/csharp3.0/LICENSE.txt)许可认证。 From c0e18786a184cfa0fe14d42f25bf6bdcf7e6fabe Mon Sep 17 00:00:00 2001 From: yanguojin Date: Tue, 24 Oct 2023 16:27:15 +0800 Subject: [PATCH 15/40] Modify the PackageProjectUrl address. --- modules/csharp_api/csharp/CSharpAPI.csproj | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/csharp_api/csharp/CSharpAPI.csproj b/modules/csharp_api/csharp/CSharpAPI.csproj index 6817c433a..65f622aa4 100644 --- a/modules/csharp_api/csharp/CSharpAPI.csproj +++ b/modules/csharp_api/csharp/CSharpAPI.csproj @@ -13,8 +13,8 @@ OpenVINO C# API Based on the C # platform, call the OpenVINO suite to deploy a deep learning model. - https://github.com/guojin-yan/OpenVINO-CSharp-API - https://github.com/guojin-yan/OpenVINO-CSharp-API + https://github.com/openvinotoolkit/openvino_contrib/tree/master/modules/csharp_api + https://github.com/openvinotoolkit/openvino_contrib/tree/master/modules/csharp_api git ../../nuget zh From cc78ceae5c6998596c42e2492782e744b4b16ea7 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Wed, 25 Oct 2023 10:46:49 +0800 Subject: [PATCH 16/40] [Csharp API Extension] Add missing files & Configure CI . 
--- .ci/azure/linux.yml | 2 ++ .ci/azure/mac.yml | 2 ++ .ci/azure/windows.yml | 2 ++ 3 files changed, 6 insertions(+) diff --git a/.ci/azure/linux.yml b/.ci/azure/linux.yml index 9fd3dd162..103e668cd 100644 --- a/.ci/azure/linux.yml +++ b/.ci/azure/linux.yml @@ -6,6 +6,7 @@ trigger: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api pr: branches: @@ -15,6 +16,7 @@ pr: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api resources: repositories: diff --git a/.ci/azure/mac.yml b/.ci/azure/mac.yml index adafa2ee7..5c68fef7b 100644 --- a/.ci/azure/mac.yml +++ b/.ci/azure/mac.yml @@ -6,6 +6,7 @@ trigger: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api pr: branches: @@ -15,6 +16,7 @@ pr: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api resources: repositories: diff --git a/.ci/azure/windows.yml b/.ci/azure/windows.yml index fd49a097e..9bbcaf1d7 100644 --- a/.ci/azure/windows.yml +++ b/.ci/azure/windows.yml @@ -6,6 +6,7 @@ trigger: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api pr: branches: @@ -15,6 +16,7 @@ pr: paths: exclude: - modules/nvidia_plugin + - modules/csharp_api resources: repositories: From 7111f13a209f04060150b5b56c833aa65639160b Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 2 Nov 2023 18:50:20 +0800 Subject: [PATCH 17/40] Modify the get_partial_shape() method of PartialShape class. --- .../csharp_api/csharp/core/partial_shape.cs | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index 16c429779..29df9327d 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -104,7 +104,7 @@ public PartialShape(Shape shape) { Ov.ov_partial_shape partial_shape = new ov_partial_shape(); HandleException.handler( - NativeMethods.ov_shape_to_partial_shape(shape.shape, out partial_shape)); + NativeMethods.ov_shape_to_partial_shape(shape.shape, ref partial_shape)); partial_shape_convert(partial_shape); } @@ -114,6 +114,7 @@ public PartialShape(Shape shape) ~PartialShape() { } + /// /// Convert partial shape to PartialShape class. /// @@ -135,21 +136,24 @@ private void partial_shape_convert(Ov.ov_partial_shape shape) /// return ov_partial_shape. public ov_partial_shape get_partial_shape() { - ov_partial_shape partial_shape = new ov_partial_shape(); - partial_shape.rank = rank.get_dimension(); - int l = Marshal.SizeOf(typeof(Ov.ov_dimension)); - IntPtr[] ds_ptr = new IntPtr[rank.get_max()]; - for (int i = 0; i < rank.get_max(); ++i) + Ov.ov_partial_shape shape_arr = new Ov.ov_partial_shape(); + shape_arr.rank = rank.get_dimension(); + List ov_dims = new List(); + for (int i = 0; i < shape_arr.rank.max; ++i) { - IntPtr ptr = Marshal.AllocHGlobal(l); - Marshal.StructureToPtr(dimensions[i], ptr, false); - ds_ptr[i] = ptr; + ov_dims.Add(dimensions[i].get_dimension()); } - - IntPtr d_ptr = Marshal.AllocHGlobal((int)(l * rank.get_max())); - Marshal.Copy(ds_ptr, 0, d_ptr, (int)rank.get_max()); - partial_shape.dims = d_ptr; - return partial_shape; + Ov.ov_dimension[] ds = ov_dims.ToArray(); + shape_arr.dims = Marshal.UnsafeAddrOfPinnedArrayElement(ds, 0); + return shape_arr; + } + /// + /// Get rank. + /// + /// + public Dimension get_rank() + { + return rank; } /// /// Get dimensions. 
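A brief sketch of the reworked `PartialShape` conversion and the new `get_rank()` accessor. The `Shape` constructor taking dimension values is an assumption (only `PartialShape(Shape)` is visible in this hunk), so adapt it to the actual `Shape` API.

```csharp
// Lift a static Shape to a PartialShape and inspect its rank.
Shape shape = new Shape(new List<long> { 1, 3, 640, 640 });   // assumed Shape constructor
PartialShape partial = new PartialShape(shape);
Dimension rank = partial.get_rank();                          // accessor added by this change
Console.WriteLine("rank max = " + rank.get_max());
```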
From 13d44633d699c6e6aabae01958d6b08a5c31cbf4 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 2 Nov 2023 18:51:08 +0800 Subject: [PATCH 18/40] Modify the program method entry point name in ov_partial_shape. --- .../csharp/native_methods/ov_partial_shape.cs | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs index c5084b5c4..af6cb7a05 100644 --- a/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs +++ b/modules/csharp_api/csharp/native_methods/ov_partial_shape.cs @@ -22,7 +22,7 @@ public partial class NativeMethods /// Static rank, and static dimensions on all axes. /// Examples: `{ 1,2,3,4}` or `{6}` or `{}` /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_partial_shape_create( long rank, @@ -43,7 +43,7 @@ public extern static ExceptionStatus ov_partial_shape_create( /// Static rank, and static dimensions on all axes. /// Examples: `{ 1,2,3,4}` or `{6}` or `{}` /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create_dynamic", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_partial_shape_create_dynamic( Ov.ov_dimension rank, @@ -57,7 +57,7 @@ public extern static ExceptionStatus ov_partial_shape_create_dynamic( /// support dynamic and static dimension. /// The pointer of partial shape /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_create_static", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_partial_shape_create_static( long rank, @@ -68,7 +68,7 @@ public extern static ExceptionStatus ov_partial_shape_create_static( /// Release internal memory allocated in partial shape. /// /// The object's internal memory will be released. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_free", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static void ov_partial_shape_free(ref Ov.ov_partial_shape partial_shape); @@ -78,7 +78,7 @@ public extern static ExceptionStatus ov_partial_shape_create_static( /// The partial_shape pointer. /// The shape pointer. /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_to_shape", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_partial_shape_to_shape( Ov.ov_partial_shape partial_shape, @@ -90,18 +90,18 @@ public extern static ExceptionStatus ov_partial_shape_to_shape( /// The shape. /// The partial_shape pointer. /// Status code of the operation: OK(0) for success. 
- [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_shape_to_partial_shape", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_shape_to_partial_shape( - Ov.ov_shape shape, - out Ov.ov_partial_shape partial_shape); + Ov.ov_shape shape, + ref Ov.ov_partial_shape partial_shape); /// /// Check this partial_shape whether is dynamic /// /// The partial_shape. /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_is_dynamic", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static bool ov_partial_shape_is_dynamic(Ov.ov_partial_shape partial_shape); @@ -110,7 +110,7 @@ public extern static ExceptionStatus ov_shape_to_partial_shape( /// /// The partial_shape pointer. /// A string reprensts partial_shape's content. - [DllImport(dll_extern, EntryPoint = "ov_get_openvino_version", + [DllImport(dll_extern, EntryPoint = "ov_partial_shape_to_string", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static string ov_partial_shape_to_string(Ov.ov_partial_shape partial_shape); } From 5df5e57545d5b62f40cb605df6eeb634d7aebd4d Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 2 Nov 2023 18:51:25 +0800 Subject: [PATCH 19/40] Modify the reshape(Dictionary partial_shapes) method. --- modules/csharp_api/csharp/core/model.cs | 29 ++++--------------------- 1 file changed, 4 insertions(+), 25 deletions(-) diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs index 46a548800..ffc824804 100644 --- a/modules/csharp_api/csharp/core/model.cs +++ b/modules/csharp_api/csharp/core/model.cs @@ -65,7 +65,6 @@ public void Dispose() /// The friendly name for a model. public string get_friendly_name() { - IntPtr s_ptr = IntPtr.Zero; HandleException.handler( NativeMethods.ov_model_get_friendly_name(m_ptr, ref s_ptr)); @@ -438,32 +437,12 @@ public bool is_dynamic() /// The list of input tensor names and PartialShape. 
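A hedged sketch of calling the simplified `reshape` overload below, assuming an existing `Model` instance `model`; the input tensor name `"images"` and the `Shape` constructor from dimension values are illustrative assumptions.

```csharp
// Reshape one input, addressed by tensor name, before compiling the model.
var newShapes = new Dictionary<string, PartialShape>
{
    // "images" is an assumed input tensor name; the Shape ctor from values is also assumed.
    { "images", new PartialShape(new Shape(new List<long> { 1, 3, 640, 640 })) }
};
model.reshape(newShapes);
```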
public void reshape(Dictionary partial_shapes) { - if (1 != partial_shapes.Count) + foreach (var partial_shape in partial_shapes) { - IntPtr[] tensor_names_ptr = new IntPtr[partial_shapes.Count]; - Ov.ov_partial_shape[] shapes = new Ov.ov_partial_shape[partial_shapes.Count]; - int i = 0; - foreach (var partial_shape in partial_shapes) - { - IntPtr p = Marshal.StringToHGlobalAnsi(partial_shape.Key); - tensor_names_ptr[i] = p; - shapes[i] = partial_shape.Value.get_partial_shape(); - } + sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(partial_shape.Key)); + PartialShape shape = partial_shape.Value; HandleException.handler( - NativeMethods.ov_model_reshape(m_ptr, tensor_names_ptr, - ref shapes[0], (ulong)partial_shapes.Count)); - } - else - { - foreach (var partial_shape in partial_shapes) - { - sbyte[] c_tensor_name = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(partial_shape.Key)); - Ov.ov_partial_shape shape = partial_shape.Value.get_partial_shape(); - HandleException.handler( - NativeMethods.ov_model_reshape_input_by_name(m_ptr, ref c_tensor_name[0], - shape)); - } - + NativeMethods.ov_model_reshape_input_by_name(m_ptr, ref c_tensor_name[0], shape.get_partial_shape())); } } /// From 34824ccc26e8c51881c15b81b28f0cb3d73f8b97 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Fri, 3 Nov 2023 14:38:54 +0800 Subject: [PATCH 20/40] Error modifying comments --- modules/csharp_api/csharp/base.cs | 25 +--------------------- modules/csharp_api/demos/yolov8/Program.cs | 2 +- 2 files changed, 2 insertions(+), 25 deletions(-) diff --git a/modules/csharp_api/csharp/base.cs b/modules/csharp_api/csharp/base.cs index b7f35c2ce..edd2a2b2b 100644 --- a/modules/csharp_api/csharp/base.cs +++ b/modules/csharp_api/csharp/base.cs @@ -10,7 +10,7 @@ namespace OpenVinoSharp /// /// OpenVINO wrapper for .NET. - /// This is the basic namespace of OpenVINO in Cshrp, + /// This is the basic namespace of OpenVINO in C#, /// and all classes and methods are within this method. /// OpenVinoSharp. /// @@ -42,29 +42,6 @@ class NamespaceDoc { } } - - - namespace model - { - /// - /// Processing methods for main common models. - /// OpenVinoSharp.model. - /// - [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] - class NamespaceDoc - { - } - namespace Yolov8 { - /// - /// The processing methods of the main Yolov8 model. - /// OpenVinoSharp.model.Yolov8. - /// - [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] - class NamespaceDoc - { - } - } - } } diff --git a/modules/csharp_api/demos/yolov8/Program.cs b/modules/csharp_api/demos/yolov8/Program.cs index 48ed06e6f..837608cee 100644 --- a/modules/csharp_api/demos/yolov8/Program.cs +++ b/modules/csharp_api/demos/yolov8/Program.cs @@ -59,7 +59,7 @@ static void yolov8_infer(string flg, string model_path, string image_path, stri print_model_info(model); // -------- Step 3. Loading a model to the device -------- - CompiledModel compiled_model = core.compiled_model(model, device); + CompiledModel compiled_model = core.compile_model(model, device); // -------- Step 4. Create an infer request -------- InferRequest infer_request = compiled_model.create_infer_request(); From 5118ed21e7e2dec647765d941954f24f57f6d955 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Mon, 6 Nov 2023 09:29:01 +0800 Subject: [PATCH 21/40] Update nuget package description. 
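The demo fix above (`compiled_model` → `compile_model`) is one step of the usual load-and-infer flow. Below is a condensed sketch of that flow using the classes in this module; the model path is a placeholder and the synchronous `infer()` call on `InferRequest` is assumed to exist.

```csharp
using (Core core = new Core())
using (Model model = core.read_model("yolov8s.xml"))               // placeholder path
using (CompiledModel compiled = core.compile_model(model, "AUTO"))
using (InferRequest request = compiled.create_infer_request())
{
    // Filling the input tensor is model-specific and omitted here.
    request.infer();                                               // assumed synchronous entry point
    Tensor output = request.get_output_tensor();
    // ... post-process 'output' ...
}
```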
--- modules/csharp_api/README.md | 47 ++++++++++++++++++++++++++----- modules/csharp_api/README_cn.md | 49 +++++++++++++++++++++++++++------ 2 files changed, 81 insertions(+), 15 deletions(-) diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md index f205e544f..a1e8a08a5 100644 --- a/modules/csharp_api/README.md +++ b/modules/csharp_api/README.md @@ -34,19 +34,52 @@ ### Native bindings -| Package | Description | Link | -| ------------------------ | --------------------------- | ------------------------------------------------------------ | -| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| Package | Description | Link | +| ------------------------------------- | ------------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.ubuntu.22-x86_64** | Native bindings for ubuntu.22-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.22-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.22-x86_64/) | +| **OpenVINO.runtime.ubuntu.20-x86_64** | Native bindings for ubuntu.20-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.20-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.20-x86_64/) | +| **OpenVINO.runtime.ubuntu.18-x86_64** | Native bindings for ubuntu.18-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.18-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.18-x86_64/) | +| **OpenVINO.runtime.debian9-arm64** | Native bindings for debian9-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.centos7-x86_64** | Native bindings for centos7-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.centos7-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.centos7-x86_64/) | +## ⚙ How to install OpenVINO™ C# API? +  The following provides OpenVINO ™ The installation method of C # API on different platforms can be customized according to the platform you are using. -## ⚙ How to install OpenVINO™ C# API? +### **Windows** -The following article provides installation methods for OpenVINO™ C# API on different platforms, which can be installed according to your own platform. 
+  Install the following package through the ``dotnet add package`` command or through Visual Studio + +```shell +dotnet add package OpenVINO.CSharp.API +dotnet add package OpenVINO.runtime.win +Or install =》 +dotnet add package OpenVINO.CSharp.Windows +``` -- [Windows](docs/en/windows_install.md) +### **Linux** + +  We have created the corresponding NuGet Package for the **Linux ** platform based on the official compiled platform, For example, using **ubuntu.22-x86_64** is installed using the ``dotnet add package`` command: + +```shell +dotnet add package OpenVINO.CSharp.API +dotnet add package OpenVINO.runtime.ubuntu.22-x86_64 +``` -- [Linux](docs/en/linux_install.md) +  After running the program once, add environment variables: + +``` +export LD_LIBRARY_PATH={Program generated executable file directory}/runtimes/ubuntu.22-x86_64/native +such as =》 +export LD_LIBRARY_PATH=/home/ygj/Program/sample1/bin/Debug/net6.0/runtimes/ubuntu.22-x86_64/native +``` + +  If for a brand new platform (without installing OpenVINO C++), it is necessary to install a dependent environment and switch to ``{Program generated executable file directory}/runtimes/ubuntu.22-x86'_ 64/native ``directory, run the following command: + +```shell +sudo -E ./install_openvino_dependencies.sh +``` ## 🏷How to use OpenVINO™ C# API? diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md index 4d3150714..8fc740a0f 100644 --- a/modules/csharp_api/README_cn.md +++ b/modules/csharp_api/README_cn.md @@ -35,19 +35,54 @@ ### Native bindings -| Package | Description | Link | -| ------------------------ | --------------------------- | ------------------------------------------------------------ | -| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| Package | Description | Link | +| ------------------------------------- | ------------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.runtime.win** | Native bindings for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.ubuntu.22-x86_64** | Native bindings for ubuntu.22-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.22-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.22-x86_64/) | +| **OpenVINO.runtime.ubuntu.20-x86_64** | Native bindings for ubuntu.20-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.20-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.20-x86_64/) | +| **OpenVINO.runtime.ubuntu.18-x86_64** | Native bindings for ubuntu.18-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.18-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.18-x86_64/) | +| **OpenVINO.runtime.debian9-arm64** | Native bindings for debian9-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.centos7-x86_64** | Native bindings for centos7-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.centos7-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.centos7-x86_64/) | ## ⚙ 如何安装 -以下文章提供了OpenVINO™ C# API在不同平台的安装方法,可以根据自己使用平台进行安装。 +以下提供了OpenVINO™ C# API在不同平台的安装方法,可以根据自己使用平台进行安装。 -- [Windows](docs/cn/windows_install.md) +### 
**Windows** -- [Linux](docs/cn/linux_install.md) +通过``dotnet add package``指令安装或通过Visual Studio安装以下程序包 + +```shell +dotnet add package OpenVINO.CSharp.API +dotnet add package OpenVINO.runtime.win +或者安装集成包=》 +dotnet add package OpenVINO.CSharp.Windows +``` + +### **Linux** + +   **linux**平台我们根据官方编译的平台制作了对应的NuGet Package,以**ubuntu.22-x86_64**为例,通过``dotnet add package``指令安装: + +```shell +dotnet add package OpenVINO.CSharp.API +dotnet add package OpenVINO.runtime.ubuntu.22-x86_64 +``` + +  运行一次程序后,添加环境变量: + +``` +export LD_LIBRARY_PATH={Program generated executable file directory}/runtimes/ubuntu.22-x86_64/native +例如=》 +export LD_LIBRARY_PATH=/home/ygj/Program/sample1/bin/Debug/net6.0/runtimes/ubuntu.22-x86_64/native +``` + +  如果对于一个全新平台(未安装过OpenVINO C++),需要安装一下依赖环境,切换到``{Program generated executable file directory}/runtimes/ubuntu.22-x86_64/native``目录下,运行以下指令: + +```shell +sudo -E ./install_openvino_dependencies.sh +``` ## 🏷开始使用 @@ -112,8 +147,6 @@ namespace test - 🔮 **NuGet包:** - 制作并发布NuGet包,发布**OpenVINO™ C# API.win 3.0.120** ,包含OpenVINO 2023.0 依赖项。 - - ## 🎖 贡献   如果您对OpenVINO™ 在C#使用感兴趣,有兴趣对开源社区做出自己的贡献,欢迎加入我们,一起开发OpenVINO™ C# API。 From e44b580a025a663e800924c89987e68cb2c41e9b Mon Sep 17 00:00:00 2001 From: yanguojin Date: Fri, 24 Nov 2023 14:09:16 +0800 Subject: [PATCH 22/40] Add C API ov_get_last_err_msg(), Set specific information for exception throwing errors. --- .../csharp/exception/handle_exception.cs | 36 +++++++++---------- .../csharp/native_methods/ov_common.cs | 8 +++++ 2 files changed, 26 insertions(+), 18 deletions(-) diff --git a/modules/csharp_api/csharp/exception/handle_exception.cs b/modules/csharp_api/csharp/exception/handle_exception.cs index 65d53feb3..92f2581f7 100644 --- a/modules/csharp_api/csharp/exception/handle_exception.cs +++ b/modules/csharp_api/csharp/exception/handle_exception.cs @@ -100,7 +100,7 @@ public static void handler(ExceptionStatus status) { /// /// general error! private static void general_error() { - throw new OVException(ExceptionStatus.GENERAL_ERROR, "general error!"); + throw new OVException(ExceptionStatus.GENERAL_ERROR, NativeMethods.ov_get_last_err_msg()); } /// /// Throw NOT_IMPLEMENTED OpenVINOException. @@ -108,7 +108,7 @@ private static void general_error() { /// not implemented! private static void not_implemented() { - throw new OVException(ExceptionStatus.NOT_IMPLEMENTED, "not implemented!"); + throw new OVException(ExceptionStatus.NOT_IMPLEMENTED, NativeMethods.ov_get_last_err_msg()); } /// @@ -117,7 +117,7 @@ private static void not_implemented() /// network not loaded! private static void network_not_loaded() { - throw new OVException(ExceptionStatus.NETWORK_NOT_LOADED, "network not loaded!"); + throw new OVException(ExceptionStatus.NETWORK_NOT_LOADED, NativeMethods.ov_get_last_err_msg()); } @@ -127,7 +127,7 @@ private static void network_not_loaded() /// parameter mismatch! private static void parameter_mismatch() { - throw new OVException(ExceptionStatus.PARAMETER_MISMATCH, "parameter mismatch!"); + throw new OVException(ExceptionStatus.PARAMETER_MISMATCH, NativeMethods.ov_get_last_err_msg()); } /// @@ -136,7 +136,7 @@ private static void parameter_mismatch() /// not found! private static void not_found() { - throw new OVException(ExceptionStatus.NOT_FOUND, "not found!"); + throw new OVException(ExceptionStatus.NOT_FOUND, NativeMethods.ov_get_last_err_msg()); } /// @@ -145,7 +145,7 @@ private static void not_found() /// out of bounds! 
private static void out_of_bounds() { - throw new OVException(ExceptionStatus.OUT_OF_BOUNDS, "out of bounds!"); + throw new OVException(ExceptionStatus.OUT_OF_BOUNDS, NativeMethods.ov_get_last_err_msg()); } @@ -155,7 +155,7 @@ private static void out_of_bounds() /// unexpection! private static void unexpection() { - throw new OVException(ExceptionStatus.UNEXPECTED, "unexpection!"); + throw new OVException(ExceptionStatus.UNEXPECTED, NativeMethods.ov_get_last_err_msg()); } @@ -166,7 +166,7 @@ private static void unexpection() /// request busy! private static void request_busy() { - throw new OVException(ExceptionStatus.REQUEST_BUSY, "request busy!"); + throw new OVException(ExceptionStatus.REQUEST_BUSY, NativeMethods.ov_get_last_err_msg()); } /// /// Throw RESULT_NOT_READY OpenVINOException. @@ -174,7 +174,7 @@ private static void request_busy() /// result not ready! private static void result_not_ready() { - throw new OVException(ExceptionStatus.RESULT_NOT_READY, "result not ready!"); + throw new OVException(ExceptionStatus.RESULT_NOT_READY, NativeMethods.ov_get_last_err_msg()); } /// /// Throw OpenVINOException. @@ -182,7 +182,7 @@ private static void result_not_ready() /// not allocated! private static void not_allocated() { - throw new OVException(ExceptionStatus.NOT_ALLOCATED, "not allocated!"); + throw new OVException(ExceptionStatus.NOT_ALLOCATED, NativeMethods.ov_get_last_err_msg()); } /// /// Throw INFER_NOT_STARTED OpenVINOException. @@ -190,7 +190,7 @@ private static void not_allocated() /// infer not started! private static void infer_not_started() { - throw new OVException(ExceptionStatus.INFER_NOT_STARTED, "infer not started!"); + throw new OVException(ExceptionStatus.INFER_NOT_STARTED, NativeMethods.ov_get_last_err_msg()); } /// /// Throw NETWORK_NOT_READ OpenVINOException. @@ -198,7 +198,7 @@ private static void infer_not_started() /// netword not read! private static void netword_not_read() { - throw new OVException(ExceptionStatus.NETWORK_NOT_READ, "netword not read!"); + throw new OVException(ExceptionStatus.NETWORK_NOT_READ, NativeMethods.ov_get_last_err_msg()); } /// /// Throw INFER_CANCELLED OpenVINOException. @@ -206,7 +206,7 @@ private static void netword_not_read() /// infer cancelled! private static void infer_cancelled() { - throw new OVException(ExceptionStatus.INFER_CANCELLED, "infer cancelled!"); + throw new OVException(ExceptionStatus.INFER_CANCELLED, NativeMethods.ov_get_last_err_msg()); } /// /// Throw INVALID_C_PARAM OpenVINOException. @@ -214,7 +214,7 @@ private static void infer_cancelled() /// invalid c param! private static void invalid_c_param() { - throw new OVException(ExceptionStatus.INVALID_C_PARAM, "invalid c param!"); + throw new OVException(ExceptionStatus.INVALID_C_PARAM, NativeMethods.ov_get_last_err_msg()); } /// /// Throw UNKNOWN_C_ERROR OpenVINOException. @@ -222,7 +222,7 @@ private static void invalid_c_param() /// unknown c error! private static void unknown_c_error() { - throw new OVException(ExceptionStatus.UNKNOWN_C_ERROR, "unknown c error!"); + throw new OVException(ExceptionStatus.UNKNOWN_C_ERROR, NativeMethods.ov_get_last_err_msg()); } /// /// Throw NOT_IMPLEMENT_C_METHOD OpenVINOException. @@ -230,7 +230,7 @@ private static void unknown_c_error() /// not implement c method! 
private static void not_implement_c_method() { - throw new OVException(ExceptionStatus.NOT_IMPLEMENT_C_METHOD, "not implement c method!"); + throw new OVException(ExceptionStatus.NOT_IMPLEMENT_C_METHOD, NativeMethods.ov_get_last_err_msg()); } /// /// Throw UNKNOW_EXCEPTION OpenVINOException. @@ -238,7 +238,7 @@ private static void not_implement_c_method() /// unknown exception! private static void unknown_exception() { - throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, "unknown exception!"); + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, NativeMethods.ov_get_last_err_msg()); } /// /// Throw PTR_NULL OpenVINOException. @@ -246,7 +246,7 @@ private static void unknown_exception() /// private static void ptr_null_exception() { - throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, "ptr is null!"); + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, NativeMethods.ov_get_last_err_msg()); } } } diff --git a/modules/csharp_api/csharp/native_methods/ov_common.cs b/modules/csharp_api/csharp/native_methods/ov_common.cs index ae342c292..9e4bf2711 100644 --- a/modules/csharp_api/csharp/native_methods/ov_common.cs +++ b/modules/csharp_api/csharp/native_methods/ov_common.cs @@ -26,5 +26,13 @@ public partial class NativeMethods CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static void ov_free(ref char content); + + /// + /// Get the last error msg. + /// + /// The last error msg. + [DllImport(dll_extern, EntryPoint = "ov_get_last_err_msg", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static string ov_get_last_err_msg(); } } From 78024a4e32bcfa8647562ccf7db939c7da60d77d Mon Sep 17 00:00:00 2001 From: yanguojin Date: Mon, 27 Nov 2023 19:33:47 +0800 Subject: [PATCH 23/40] Fix error and return text parsing error. --- .../csharp/exception/handle_exception.cs | 37 ++++++++++--------- .../csharp/native_methods/ov_common.cs | 2 +- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/modules/csharp_api/csharp/exception/handle_exception.cs b/modules/csharp_api/csharp/exception/handle_exception.cs index 92f2581f7..10805f825 100644 --- a/modules/csharp_api/csharp/exception/handle_exception.cs +++ b/modules/csharp_api/csharp/exception/handle_exception.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Runtime.InteropServices; using System.Text; using System.Threading.Tasks; @@ -100,7 +101,7 @@ public static void handler(ExceptionStatus status) { /// /// general error! private static void general_error() { - throw new OVException(ExceptionStatus.GENERAL_ERROR, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.GENERAL_ERROR, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw NOT_IMPLEMENTED OpenVINOException. @@ -108,7 +109,7 @@ private static void general_error() { /// not implemented! private static void not_implemented() { - throw new OVException(ExceptionStatus.NOT_IMPLEMENTED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NOT_IMPLEMENTED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// @@ -117,7 +118,7 @@ private static void not_implemented() /// network not loaded! 
private static void network_not_loaded() { - throw new OVException(ExceptionStatus.NETWORK_NOT_LOADED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NETWORK_NOT_LOADED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } @@ -127,7 +128,7 @@ private static void network_not_loaded() /// parameter mismatch! private static void parameter_mismatch() { - throw new OVException(ExceptionStatus.PARAMETER_MISMATCH, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.PARAMETER_MISMATCH, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// @@ -136,7 +137,7 @@ private static void parameter_mismatch() /// not found! private static void not_found() { - throw new OVException(ExceptionStatus.NOT_FOUND, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NOT_FOUND, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// @@ -145,7 +146,7 @@ private static void not_found() /// out of bounds! private static void out_of_bounds() { - throw new OVException(ExceptionStatus.OUT_OF_BOUNDS, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.OUT_OF_BOUNDS, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } @@ -155,7 +156,7 @@ private static void out_of_bounds() /// unexpection! private static void unexpection() { - throw new OVException(ExceptionStatus.UNEXPECTED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.UNEXPECTED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } @@ -166,7 +167,7 @@ private static void unexpection() /// request busy! private static void request_busy() { - throw new OVException(ExceptionStatus.REQUEST_BUSY, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.REQUEST_BUSY, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw RESULT_NOT_READY OpenVINOException. @@ -174,7 +175,7 @@ private static void request_busy() /// result not ready! private static void result_not_ready() { - throw new OVException(ExceptionStatus.RESULT_NOT_READY, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.RESULT_NOT_READY, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw OpenVINOException. @@ -182,7 +183,7 @@ private static void result_not_ready() /// not allocated! private static void not_allocated() { - throw new OVException(ExceptionStatus.NOT_ALLOCATED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NOT_ALLOCATED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw INFER_NOT_STARTED OpenVINOException. @@ -190,7 +191,7 @@ private static void not_allocated() /// infer not started! private static void infer_not_started() { - throw new OVException(ExceptionStatus.INFER_NOT_STARTED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.INFER_NOT_STARTED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw NETWORK_NOT_READ OpenVINOException. @@ -198,7 +199,7 @@ private static void infer_not_started() /// netword not read! private static void netword_not_read() { - throw new OVException(ExceptionStatus.NETWORK_NOT_READ, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NETWORK_NOT_READ, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw INFER_CANCELLED OpenVINOException. 
@@ -206,7 +207,7 @@ private static void netword_not_read() /// infer cancelled! private static void infer_cancelled() { - throw new OVException(ExceptionStatus.INFER_CANCELLED, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.INFER_CANCELLED, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw INVALID_C_PARAM OpenVINOException. @@ -214,7 +215,7 @@ private static void infer_cancelled() /// invalid c param! private static void invalid_c_param() { - throw new OVException(ExceptionStatus.INVALID_C_PARAM, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.INVALID_C_PARAM, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw UNKNOWN_C_ERROR OpenVINOException. @@ -222,7 +223,7 @@ private static void invalid_c_param() /// unknown c error! private static void unknown_c_error() { - throw new OVException(ExceptionStatus.UNKNOWN_C_ERROR, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.UNKNOWN_C_ERROR, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw NOT_IMPLEMENT_C_METHOD OpenVINOException. @@ -230,7 +231,7 @@ private static void unknown_c_error() /// not implement c method! private static void not_implement_c_method() { - throw new OVException(ExceptionStatus.NOT_IMPLEMENT_C_METHOD, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.NOT_IMPLEMENT_C_METHOD, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw UNKNOW_EXCEPTION OpenVINOException. @@ -238,7 +239,7 @@ private static void not_implement_c_method() /// unknown exception! private static void unknown_exception() { - throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } /// /// Throw PTR_NULL OpenVINOException. @@ -246,7 +247,7 @@ private static void unknown_exception() /// private static void ptr_null_exception() { - throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, NativeMethods.ov_get_last_err_msg()); + throw new OVException(ExceptionStatus.UNKNOW_EXCEPTION, Marshal.PtrToStringAnsi(NativeMethods.ov_get_last_err_msg())); } } } diff --git a/modules/csharp_api/csharp/native_methods/ov_common.cs b/modules/csharp_api/csharp/native_methods/ov_common.cs index 9e4bf2711..0a0ff31e7 100644 --- a/modules/csharp_api/csharp/native_methods/ov_common.cs +++ b/modules/csharp_api/csharp/native_methods/ov_common.cs @@ -33,6 +33,6 @@ public partial class NativeMethods /// The last error msg. 
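With `ov_get_last_err_msg()` marshalled through `Marshal.PtrToStringAnsi`, the `OVException` thrown by `HandleException.handler` now carries the native error text. A minimal sketch of consuming it; only the inherited `Message` property is relied on, and the model path is a deliberately invalid placeholder.

```csharp
try
{
    using (Core core = new Core())
    using (Model model = core.read_model("does_not_exist.xml"))   // placeholder path
    {
    }
}
catch (OVException e)
{
    // The message now contains the text returned by ov_get_last_err_msg().
    Console.WriteLine("OpenVINO error: " + e.Message);
}
```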
[DllImport(dll_extern, EntryPoint = "ov_get_last_err_msg", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public extern static string ov_get_last_err_msg(); + public extern static IntPtr ov_get_last_err_msg(); } } From 3ad503037d82768082d10fb3fff4bc76b6c9ecb0 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 30 Nov 2023 21:41:10 +0800 Subject: [PATCH 24/40] =?UTF-8?q?Add=20and=20fix=20set=5Fproperty()?= =?UTF-8?q?=E3=80=81get=5Fproperty()=20methods.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/csharp_api/csharp/common/property.cs | 163 ++++++++++++++++++ modules/csharp_api/csharp/core/core.cs | 33 +++- .../csharp/native_methods/ov_core.cs | 5 +- modules/csharp_api/csharp/ov/ov.cs | 1 + 4 files changed, 200 insertions(+), 2 deletions(-) create mode 100644 modules/csharp_api/csharp/common/property.cs diff --git a/modules/csharp_api/csharp/common/property.cs b/modules/csharp_api/csharp/common/property.cs new file mode 100644 index 000000000..cf6ddd25a --- /dev/null +++ b/modules/csharp_api/csharp/common/property.cs @@ -0,0 +1,163 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp +{ + /// + /// A header for advanced hardware specific properties for OpenVINO runtime devices. + /// To use in set_property, compile_model, import_model, get_property methods. + /// + public enum PropertyKey + { + // Read-only property key + /// + /// Read-only property to get a string list of supported read-only properties. + /// + SUPPORTED_PROPERTIES, + /// + /// Read-only property to get a list of available device IDs. + /// + AVAILABLE_DEVICES, + /// + /// Read-only property(uint32_t string) to get an unsigned integer value of optimaln + /// number of compiled model infer requests. + /// + OPTIMAL_NUMBER_OF_INFER_REQUESTS, + /// + /// Read-only property + RANGE_FOR_ASYNC_INFER_REQUESTS, + /// + /// Read-only property(string(unsigned int, unsigned int)) to provide information about a range for + /// streams on platforms where streams are supported + /// + RANGE_FOR_STREAMS, + /// + /// Read-only property to get a string value representing a full device name. + /// + FULL_DEVICE_NAME, + /// + /// Read-only property to get a string list of capabilities options per device. + /// + OPTIMIZATION_CAPABILITIES, + /// + /// Read-only property to get a name of name of a model + /// + NETWORK_NAME, + /// + /// Read-only property(uint32_t string) to query information optimal batch size for the given device + /// and the network + /// + OPTIMAL_BATCH_SIZE, + /// + /// Read-only property to get maximum batch size which does not cause performance degradation due + /// to memory swap impact. + /// + MAX_BATCH_SIZE, + + // Read-write property key + /// + /// Read-write property(string) to set/get the directory which will be used to store any data cached + /// by plugins. + /// + CACHE_DIR, + /// + /// Read-write property(uint32_t string) to set/get the number of executor logical partitions. + /// + NUM_STREAMS, + /// + /// Read-write property to set/get the name for setting CPU affinity per thread option. + /// + AFFINITY, + /// + /// Read-write property9int32_t string) to set/get the maximum number of threads that can be used + /// for inference tasks. 
+ /// + INFERENCE_NUM_THREADS, + /// + /// Read-write property, it is high-level OpenVINO Performance Hints + /// + PERFORMANCE_HINT, + /// + /// Read-write property, it is high-level OpenVINO hint for using CPU pinning to bind CPU threads to processors + /// during inference + /// + ENABLE_CPU_PINNING, + /// + /// Read-write property, it is high-level OpenVINO Hints for the type of CPU core used during inference + /// + SCHEDULING_CORE_TYPE, + /// + /// Read-write property, it is high-level OpenVINO hint for using hyper threading processors during CPU inference + /// + ENABLE_HYPER_THREADING, + /// + /// Read-write property to set the hint for device to use specified precision for inference. + /// + INFERENCE_PRECISION_HINT, + /// + /// (Optional) Read-write property(uint32_t string) that backs the Performance Hints by giving + /// additional information on how many inference requests the application will be + /// keeping in flight usually this value comes from the actual use-case (e.g. + /// number of video-cameras, or other sources of inputs) + /// + PERFORMANCE_HINT_NUM_REQUESTS, + /// + /// Read-write property, high-level OpenVINO model priority hint. + /// + MODEL_PRIORITY, + /// + /// Read-write property for setting desirable log level. + /// + LOG_LEVEL, + /// + /// Read-write property(string) for setting performance counters option. + /// + PERF_COUNT, + /// + /// Read-write property(std::pair(std::string, Any)), device Priorities config option, + /// with comma-separated devices listed in the desired priority + /// + MULTI_DEVICE_PRIORITIES, + /// + /// Read-write property(string) for high-level OpenVINO Execution hint + /// unlike low-level properties that are individual (per-device), the hints are something that every device accepts + /// and turns into device-specific settings + /// Execution mode hint controls preferred optimization targets (performance or accuracy) for given model + /// + EXECUTION_MODE_HINT, + /// + /// Read-write property to set whether force terminate tbb when ov core destruction + /// + FORCE_TBB_TERMINATE, + /// + /// Read-write property to configure `mmap()` use for model read + /// + ENABLE_MMAP, + /// + /// Read-write property + /// + AUTO_BATCH_TIMEOUT, + } + + public static partial class Ov + { + /// + /// Get the read-write property(string) to set/get the directory which will be used to store any data cached by plugins. + /// + /// + /// The read-write property(string) to set/get the directory which will be used to store any data cached by plugins. + /// + /// The pair data. + public static KeyValuePair cache_dir(string dir) + { + return new KeyValuePair(PropertyKey.CACHE_DIR.ToString(), dir); + } + } +} diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 5ee01ba65..6b8689114 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Runtime.InteropServices; +using System.Text; namespace OpenVinoSharp { @@ -286,8 +287,38 @@ public CompiledModel compile_model(string model_path, string device_name) NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); return new CompiledModel(compiled_model_ptr); } + /// + /// Sets properties for a device, acceptable keys can be found in PropertyKey. + /// + /// Name of a device to load a model to. 
+ /// + /// The read-write property(string) to set/get the directory which will be used to store any data cached by plugins. + /// + public void set_property(string device_name, KeyValuePair properties) + { + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + IntPtr key = Marshal.StringToHGlobalAnsi(properties.Key); + IntPtr value = Marshal.StringToHGlobalAnsi(properties.Value); + HandleException.handler( + NativeMethods.ov_core_set_property(m_ptr, ref c_device[0], key, value)); + } - + /// + /// Gets properties related to device behaviour. + /// The method extracts information that can be set via the set_property method. + /// + /// Name of a device to load a model to. + /// A header for advanced hardware specific properties for OpenVINO runtime devices. + /// Properties related to device behaviour. + public string get_property(string device_name, PropertyKey key) + { + IntPtr value = IntPtr.Zero; + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + sbyte[] c_key = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(key.ToString())); + HandleException.handler( + NativeMethods.ov_core_get_property(m_ptr, ref c_device[0], ref c_key[0], ref value)); + return Marshal.PtrToStringAnsi(value); + } /// /// Returns devices available for inference. /// Core objects go over all registered plugins and ask about available devices. diff --git a/modules/csharp_api/csharp/native_methods/ov_core.cs b/modules/csharp_api/csharp/native_methods/ov_core.cs index 65e039a09..6a67fe411 100644 --- a/modules/csharp_api/csharp/native_methods/ov_core.cs +++ b/modules/csharp_api/csharp/native_methods/ov_core.cs @@ -195,7 +195,10 @@ public extern static ExceptionStatus ov_core_compile_model_from_file( public extern static ExceptionStatus ov_core_set_property( IntPtr core, ref sbyte device_name); - + [DllImport(dll_extern, EntryPoint = "ov_core_set_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public static extern ExceptionStatus ov_core_set_property(IntPtr core, + ref sbyte device_name, IntPtr varg1, IntPtr varg2); /// /// Gets properties related to device behaviour. /// The method extracts information that can be set via the set_property method. diff --git a/modules/csharp_api/csharp/ov/ov.cs b/modules/csharp_api/csharp/ov/ov.cs index 65ca74df8..279ae7dcd 100644 --- a/modules/csharp_api/csharp/ov/ov.cs +++ b/modules/csharp_api/csharp/ov/ov.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.Runtime.InteropServices; namespace OpenVinoSharp From 38e01a60eace63b1e744314418b4c9c69afa780c Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:27:50 +0800 Subject: [PATCH 25/40] Add content_from_file(string file) method. 
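A short sketch of the new property round-trip using only members introduced above (`Ov.cache_dir`, `set_property`, `get_property`, `PropertyKey`); the device name and cache directory are placeholders.

```csharp
using (Core core = new Core())
{
    // Enable the plugin model cache for the CPU device.
    core.set_property("CPU", Ov.cache_dir("./model_cache"));

    // Query a read-only property back as a string.
    string fullName = core.get_property("CPU", PropertyKey.FULL_DEVICE_NAME);
    Console.WriteLine(fullName);
}
```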
--- modules/csharp_api/csharp/ov/ov.cs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/modules/csharp_api/csharp/ov/ov.cs b/modules/csharp_api/csharp/ov/ov.cs index 279ae7dcd..b653d0b0e 100644 --- a/modules/csharp_api/csharp/ov/ov.cs +++ b/modules/csharp_api/csharp/ov/ov.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.IO; using System.Runtime.InteropServices; namespace OpenVinoSharp @@ -30,5 +31,20 @@ public static Version get_openvino_version() NativeMethods.ov_version_free(ptr); return new_version; } + + public static byte[] content_from_file(string file) + { + FileStream fs = new FileStream(file, FileMode.Open, FileAccess.Read); + + long len = fs.Seek(0, SeekOrigin.End); + + + fs.Seek(0, SeekOrigin.Begin); + + byte[] data = new byte[len + 1]; + + fs.Read(data, 0, (int)len); + return data; + } } } \ No newline at end of file From d459f16148441df10d29824e6504e122e42cce5a Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:28:19 +0800 Subject: [PATCH 26/40] Fix struct ProfilingInfo error. --- modules/csharp_api/csharp/ov/ov_struct.cs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/csharp_api/csharp/ov/ov_struct.cs b/modules/csharp_api/csharp/ov/ov_struct.cs index 8cfe29cad..f6ade49fc 100644 --- a/modules/csharp_api/csharp/ov/ov_struct.cs +++ b/modules/csharp_api/csharp/ov/ov_struct.cs @@ -109,6 +109,8 @@ public enum Status /// EXECUTED }; + + public Status status; /// /// The absolute time, in microseconds, that the node ran (in total). /// @@ -130,6 +132,7 @@ public enum Status /// public string node_type; }; + /// /// A list of profiling info data /// From e55a7a13659799049dc4f3c00d0354d693a1ba87 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:29:06 +0800 Subject: [PATCH 27/40] Fix method error. --- modules/csharp_api/csharp/core/core.cs | 208 +++++++++++++++++++++---- 1 file changed, 177 insertions(+), 31 deletions(-) diff --git a/modules/csharp_api/csharp/core/core.cs b/modules/csharp_api/csharp/core/core.cs index 6b8689114..280c0ad63 100644 --- a/modules/csharp_api/csharp/core/core.cs +++ b/modules/csharp_api/csharp/core/core.cs @@ -1,5 +1,7 @@ using System; using System.Collections.Generic; +using System.IO; +using System.Reflection; using System.Runtime.InteropServices; using System.Text; @@ -67,8 +69,6 @@ public Core(string xml_config_file = null) HandleException.handler( NativeMethods.ov_core_create(ref m_ptr)); } - - } /// /// Core's destructor @@ -180,34 +180,46 @@ public Model read_model(string model_path, Tensor weights) { throw new ArgumentNullException(nameof(weights)); } + FileStream fs = new FileStream(model_path, FileMode.Open, FileAccess.Read); + long len = fs.Seek(0, SeekOrigin.End); + fs.Seek(0, SeekOrigin.Begin); + byte[] data = new byte[len + 1]; + fs.Read(data, 0, (int)len); + fs.Close(); + IntPtr model_ptr = new IntPtr(); + HandleException.handler( + NativeMethods.ov_core_read_model_from_memory(m_ptr, ref data[0], weights.Ptr, ref model_ptr)); + return new Model(model_ptr); + } + /// + /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. + /// + /// String with a model in IR / ONNX / PDPD / TF / TFLite format, + /// You can obtain input content through the Ov.content_from_file() method. + /// Shared pointer to a constant tensor with weights. 
+ /// + public Model read_model(byte[] model_str, Tensor weights) + { IntPtr model_ptr = new IntPtr(); - sbyte[] c_model_path = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); HandleException.handler( - NativeMethods.ov_core_read_model_from_memory(m_ptr, ref c_model_path[0], weights.Ptr, ref model_ptr)); + NativeMethods.ov_core_read_model_from_memory(m_ptr, ref model_str[0], weights.Ptr, ref model_ptr)); return new Model(model_ptr); } + /// /// Creates a compiled model from a source model object. /// /// Model object acquired from Core::read_model. + /// Optional map of pairs: (property name, property value) relevant only for this load operation. /// A compiled model. /// /// Users can create as many compiled models as they need and use /// them simultaneously (up to the limitation of the hardware resources). /// - public CompiledModel compile_model(Model model) + public CompiledModel compile_model(Model model, Dictionary properties = null) { - if (model == null) - { - throw new ArgumentNullException(nameof(model)); - } - IntPtr compiled_model_ptr = new IntPtr(); - string device_name = "AUTO"; - sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); - HandleException.handler( - NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 0, ref compiled_model_ptr)); - return new CompiledModel(compiled_model_ptr); + return compile_model(model, "AUTO", properties); } /// @@ -215,12 +227,13 @@ public CompiledModel compile_model(Model model) /// /// Model object acquired from Core::read_model. /// Name of a device to load a model to. + /// Optional map of pairs: (property name, property value) relevant only for this load operation. /// A compiled model. /// /// Users can create as many compiled models as they need and use /// them simultaneously (up to the limitation of the hardware resources). 
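A sketch of the new memory-based `read_model` overload together with `Ov.content_from_file` from the previous patch, assuming an existing `Core` instance `core` and a `Tensor` named `weights` that already holds the `.bin` contents (its construction is not shown here).

```csharp
byte[] xmlContent = Ov.content_from_file("model.xml");   // placeholder path
Model model = core.read_model(xmlContent, weights);      // 'weights' built elsewhere
Console.WriteLine(model.get_friendly_name());
```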
/// - public CompiledModel compile_model(Model model, string device_name) + public CompiledModel compile_model(Model model, string device_name, Dictionary properties=null) { if (model == null) { @@ -232,8 +245,51 @@ public CompiledModel compile_model(Model model, string device_name) } IntPtr compiled_model_ptr = new IntPtr(); sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); - HandleException.handler( + if (properties == null) + { + HandleException.handler( NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 0, ref compiled_model_ptr)); + } + else if (properties.Count==1) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 2, ref compiled_model_ptr, + inputs[0], inputs[1])); + } + else if (properties.Count == 2) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 4, ref compiled_model_ptr, + inputs[0], inputs[1], inputs[2], inputs[3])); + } + else if (properties.Count == 3) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model(m_ptr, model.m_ptr, ref c_device[0], 6, ref compiled_model_ptr, + inputs[0], inputs[1], inputs[2], inputs[3], inputs[4], inputs[5])); + } + else + { + throw new Exception("Only supports parameter quantities of 0, 1, 2, and 3."); + } return new CompiledModel(compiled_model_ptr); } @@ -241,36 +297,28 @@ public CompiledModel compile_model(Model model, string device_name) /// Reads and loads a compiled model from the IR/ONNX/PDPD file to the default OpenVINO device selected by the AUTO plugin. /// /// Path to a model. + /// Optional map of pairs: (property name, property value) relevant only for this load operation. /// /// This can be more efficient than using the Core::read_model + Core::compile_model(model_in_memory_object) flow, /// especially for cases when caching is enabled and a cached model is availab /// /// A compiled model. - public CompiledModel compile_model(string model_path) + public CompiledModel compile_model(string model_path, Dictionary properties = null) { - if (string.IsNullOrEmpty(model_path)) - { - throw new ArgumentNullException(nameof(model_path)); - } - IntPtr compiled_model_ptr = new IntPtr(); - string device_name = "AUTO"; - sbyte[] c_model = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); - sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); - HandleException.handler( - NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); - return new CompiledModel(compiled_model_ptr); + return compile_model(model_path, "AUTO", properties); } /// /// Reads a model and creates a compiled model from the IR/ONNX/PDPD file. /// /// Path to a model. /// Name of a device to load a model to. + /// Optional map of pairs: (property name, property value) relevant only for this load operation. 
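A sketch of passing load-time properties (this implementation accepts at most three key/value pairs), assuming existing `core` and `model` instances; the property values are the usual OpenVINO hint strings and should be treated as illustrative.

```csharp
var loadProperties = new Dictionary<string, string>
{
    { PropertyKey.PERFORMANCE_HINT.ToString(), "LATENCY" },        // assumed hint value
    { PropertyKey.INFERENCE_NUM_THREADS.ToString(), "4" }
};
CompiledModel compiled = core.compile_model(model, "CPU", loadProperties);
```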
/// /// This can be more efficient than using the Core::read_model + Core::compile_model(model_in_memory_object) flow, /// especially for cases when caching is enabled and a cached model is availab /// /// A compiled model. - public CompiledModel compile_model(string model_path, string device_name) + public CompiledModel compile_model(string model_path, string device_name, Dictionary properties = null) { if (string.IsNullOrEmpty(model_path)) { @@ -283,8 +331,51 @@ public CompiledModel compile_model(string model_path, string device_name) IntPtr compiled_model_ptr = new IntPtr(); sbyte[] c_model = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(model_path)); sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); - HandleException.handler( + if (properties == null) + { + HandleException.handler( NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 0, ref compiled_model_ptr)); + } + else if (properties.Count == 1) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 2, ref compiled_model_ptr, + inputs[0], inputs[1])); + } + else if (properties.Count == 2) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 4, ref compiled_model_ptr, + inputs[0], inputs[1], inputs[2], inputs[3])); + } + else if (properties.Count == 3) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_compile_model_from_file(m_ptr, ref c_model[0], ref c_device[0], 6, ref compiled_model_ptr, + inputs[0], inputs[1], inputs[2], inputs[3], inputs[4], inputs[5])); + } + else + { + throw new Exception("Only supports parameter quantities of 0, 1, 2, and 3."); + } return new CompiledModel(compiled_model_ptr); } /// @@ -303,6 +394,51 @@ public void set_property(string device_name, KeyValuePair proper NativeMethods.ov_core_set_property(m_ptr, ref c_device[0], key, value)); } + public void set_property(string device_name, Dictionary properties) + { + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + + if (properties.Count == 1) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_set_property(m_ptr, ref c_device[0], inputs[0], inputs[1])); + } + else if (properties.Count == 2) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_set_property(m_ptr, ref c_device[0], inputs[0], inputs[1], inputs[2], inputs[3])); + } + else if (properties.Count == 3) + { + List inputs = new List(); + foreach (var item in properties) + { + inputs.Add(Marshal.StringToHGlobalAnsi(item.Key)); + 
inputs.Add(Marshal.StringToHGlobalAnsi(item.Value)); + } + HandleException.handler( + NativeMethods.ov_core_set_property(m_ptr, ref c_device[0], inputs[0], inputs[1], + inputs[2], inputs[3], inputs[4], inputs[5])); + } + else + { + throw new Exception("Only supports parameter quantities of 1, 2, and 3."); + } + + } + /// /// Gets properties related to device behaviour. /// The method extracts information that can be set via the set_property method. @@ -349,6 +485,16 @@ public List get_available_devices() NativeMethods.ov_available_devices_free(devices_ptr); return devices; } + + public CompiledModel import_model(string model_path, string device_name = "AUTO") + { + IntPtr value = IntPtr.Zero; + byte[] data = Ov.content_from_file(model_path); + sbyte[] c_device = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(device_name)); + HandleException.handler( + NativeMethods.ov_core_import_model(m_ptr, ref data[0], (ulong)data.Length, ref c_device[0], ref value)); + return new CompiledModel(value); + } } } From a6e3a0c14d5759fe451a8426cce84b3baf21e158 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:30:33 +0800 Subject: [PATCH 28/40] Add default constructor. --- modules/csharp_api/csharp/core/compiled_model.cs | 8 +++++++- modules/csharp_api/csharp/core/model.cs | 9 +++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/modules/csharp_api/csharp/core/compiled_model.cs b/modules/csharp_api/csharp/core/compiled_model.cs index 43bf5bc81..34cdf52c1 100644 --- a/modules/csharp_api/csharp/core/compiled_model.cs +++ b/modules/csharp_api/csharp/core/compiled_model.cs @@ -20,7 +20,7 @@ public class CompiledModel : IDisposable /// /// [private]CompiledModel class pointer. /// - private IntPtr m_ptr; + private IntPtr m_ptr = IntPtr.Zero; /// /// [private]CompiledModel class pointer. /// @@ -29,6 +29,12 @@ public IntPtr Ptr get { return m_ptr; } set { m_ptr = value; } } + /// + /// Default Constructor + /// + public CompiledModel() + { + } /// /// Constructs CompiledModel from the initialized ptr. diff --git a/modules/csharp_api/csharp/core/model.cs b/modules/csharp_api/csharp/core/model.cs index ffc824804..cd540bf74 100644 --- a/modules/csharp_api/csharp/core/model.cs +++ b/modules/csharp_api/csharp/core/model.cs @@ -29,13 +29,18 @@ public IntPtr Ptr /// /// Default Constructor /// + public Model() + { + } + /// + /// Constructs Model from the initialized ptr. + /// /// Model pointer. public Model(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("Model init error : ptr is null!"); - return; + throw new OVException(ExceptionStatus.GENERAL_ERROR, "The ptr is null!"); } Ptr = ptr; } From eb84f0b37c4d338a69c748f651d3d0352aa5fb56 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:31:16 +0800 Subject: [PATCH 29/40] Add parameter constructor. --- modules/csharp_api/csharp/core/tensor.cs | 82 +++++++++++++++++++++--- 1 file changed, 74 insertions(+), 8 deletions(-) diff --git a/modules/csharp_api/csharp/core/tensor.cs b/modules/csharp_api/csharp/core/tensor.cs index 9d45c54b0..586abd929 100644 --- a/modules/csharp_api/csharp/core/tensor.cs +++ b/modules/csharp_api/csharp/core/tensor.cs @@ -26,6 +26,10 @@ public class Tensor : IDisposable /// public IntPtr Ptr { get { return m_ptr; } set { m_ptr = value; } } + /// + /// Default Constructor + /// + public Tensor() { } /// /// Constructs Tensor from the initialized pointer. 
/// @@ -46,14 +50,70 @@ public Tensor(IntPtr ptr) /// Tensor element type /// Tensor shape /// Image data - public Tensor(element.Type type, Shape shape, byte[] mat) + public Tensor(element.Type type, Shape shape, byte[] mat) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)type.get_type(), shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); + } + public Tensor(OvType type, Shape shape, byte[] mat) + :this(new element.Type(type.get_type()), shape, mat) + { + } + /// + /// Constructs Tensor using element type ,shape and input data. + /// + /// Tensor shape + /// Input data + public Tensor(Shape shape, float[] mat) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)ElementType.F32, shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); + } + /// + /// Constructs Tensor using element type ,shape and input data. + /// + /// Tensor shape + /// Input data + public Tensor(Shape shape, double[] mat) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)ElementType.F64, shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); + } + /// + /// Constructs Tensor using element type ,shape and input data. + /// + /// Tensor shape + /// Input data + public Tensor(Shape shape, int[] mat) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)ElementType.I32, shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); + } + /// + /// Constructs Tensor using element type ,shape and input data. + /// + /// Tensor shape + /// Input data + public Tensor(Shape shape, short[] mat) { - int l =mat.Length; - IntPtr data = Marshal.AllocHGlobal(l); - Marshal.Copy(mat, 0, data, (int)mat.Length); HandleException.handler( NativeMethods.ov_tensor_create_from_host_ptr - ((uint)type.get_type(), shape.shape, data, ref m_ptr)); + ((uint)ElementType.I16, shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); + } + /// + /// Constructs Tensor using element type ,shape and input data. + /// + /// Tensor shape + /// Input data + public Tensor(Shape shape, long[] mat) + { + HandleException.handler( + NativeMethods.ov_tensor_create_from_host_ptr + ((uint)ElementType.I64, shape.shape, Marshal.UnsafeAddrOfPinnedArrayElement(mat, 0), ref m_ptr)); } /// /// Constructs Tensor using element type and shape. Wraps allocated host memory. @@ -68,7 +128,10 @@ public Tensor(element.Type type, Shape shape, IntPtr host_ptr) NativeMethods.ov_tensor_create_from_host_ptr ((uint)type.get_type(), shape.shape, host_ptr, ref m_ptr)); } - + public Tensor(OvType type, Shape shape, IntPtr host_ptr) + :this(new element.Type(type.get_type()), shape, host_ptr) + { + } /// /// Constructs Tensor using element type and shape. 
Allocate internal host storage using default allocator /// @@ -80,7 +143,10 @@ public Tensor(element.Type type, Shape shape) NativeMethods.ov_tensor_create ((uint)type.get_type(), shape.shape, ref m_ptr)); } - + public Tensor(OvType type, Shape shape) + : this(new element.Type(type.get_type()), shape) + { + } /// /// Default copy constructor /// @@ -212,7 +278,7 @@ public void set_data(T[] input_data) string t = typeof(T).ToString(); if (t == "System.Byte") { - float[] data = (float[])Convert.ChangeType(input_data, typeof(float[])); + byte[] data = (byte[])Convert.ChangeType(input_data, typeof(byte[])); Marshal.Copy(data, 0, data_ptr, length); } else if (t == "System.Int32") From 1a164077482680d604494a6d6001e7a48f2f8d95 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:32:06 +0800 Subject: [PATCH 30/40] Add set_tensor(Input port, Tensor tensor) method. --- .../csharp_api/csharp/core/infer_request.cs | 85 +++++++++++++++++-- 1 file changed, 80 insertions(+), 5 deletions(-) diff --git a/modules/csharp_api/csharp/core/infer_request.cs b/modules/csharp_api/csharp/core/infer_request.cs index 9ad8849b2..66058a3d4 100644 --- a/modules/csharp_api/csharp/core/infer_request.cs +++ b/modules/csharp_api/csharp/core/infer_request.cs @@ -110,6 +110,45 @@ public void set_tensor(Node node, Tensor tensor) /// /// Reference to a tensor. The element_type and shape of a tensor must match /// the model's input/output element_type and size. + public void set_tensor(Input port, Tensor tensor) + { + if (port.get_node().node_type == Node.NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_const_port( + m_ptr, port.get_node().Ptr, tensor.Ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_infer_request_set_tensor_by_port( + m_ptr, port.get_node().Ptr, tensor.Ptr)); + } + } + /// + /// Sets an input/output tensor to infer. + /// + /// + /// Port of the input or output tensor. Use the following methods to get the ports: + /// - Model.input() + /// - Model.const_input() + /// - Model.inputs() + /// - Model.const_inputs() + /// - Model.output() + /// - Model.const_output() + /// - Model.outputs() + /// - Model.const_outputs() + /// - CompiledModel.input() + /// - CompiledModel.const_input() + /// - CompiledModel.inputs() + /// - CompiledModel.const_inputs() + /// - CompiledModel.output() + /// - CompiledModel.const_output() + /// - CompiledModel.outputs() + /// - CompiledModel.const_outputs() + /// + /// Reference to a tensor. The element_type and shape of a tensor must match + /// the model's input/output element_type and size. public void set_tensor(Output port, Tensor tensor) { if (port.get_node().node_type == Node.NodeType.e_const) @@ -243,6 +282,29 @@ public Tensor get_tensor(Output port) } return new Tensor(tensor_ptr); } + /// + /// Gets an input/output tensor for inference. + /// + /// If the tensor with the specified @p port is not found, an exception is thrown. + /// Port of the tensor to get. + /// Tensor for the port @p port. + public Tensor get_tensor(Input port) + { + IntPtr tensor_ptr = IntPtr.Zero; + if (port.get_node().node_type == Node.NodeType.e_const) + { + HandleException.handler( + NativeMethods.ov_infer_request_get_tensor_by_const_port( + m_ptr, port.get_node().Ptr, ref tensor_ptr)); + } + else + { + HandleException.handler( + NativeMethods.ov_infer_request_get_tensor_by_port( + m_ptr, port.get_node().Ptr, ref tensor_ptr)); + } + return new Tensor(tensor_ptr); + } /// /// Gets an input tensor for inference. 
@@ -362,18 +424,31 @@ public bool wait_for(long timeout) /// List of profiling information for operations in a model. public List get_profiling_info() { + int l = Marshal.SizeOf(typeof(ov_profiling_info_list)); + IntPtr ptr = Marshal.AllocHGlobal(l); ov_profiling_info_list profiling_info_list = new ov_profiling_info_list(); + + profiling_info_list.size = 0; + profiling_info_list.profiling_infos = IntPtr.Zero; + + Marshal.StructureToPtr(profiling_info_list, ptr, false); + HandleException.handler( - NativeMethods.ov_infer_request_get_profiling_info(m_ptr, ref profiling_info_list)); - IntPtr[] profiling_infos_ptr = new IntPtr[profiling_info_list.size]; - Marshal.Copy(profiling_info_list.profiling_infos, profiling_infos_ptr, 0, (int)profiling_info_list.size); + NativeMethods.ov_infer_request_get_profiling_info(m_ptr, ptr)); + + var tempp = Marshal.PtrToStructure(ptr, typeof(ov_profiling_info_list)); + profiling_info_list = (ov_profiling_info_list)tempp; + l = Marshal.SizeOf(typeof(Ov.ProfilingInfo)); + List profiling_infos = new List(); for (int i = 0; i < (int)profiling_info_list.size; ++i) { - var temp = Marshal.PtrToStructure(profiling_infos_ptr[i], typeof(Ov.ProfilingInfo)); - Ov.ProfilingInfo profiling_info = (Ov.ProfilingInfo)temp; + var tempt = Marshal.PtrToStructure(profiling_info_list.profiling_infos, typeof(Ov.ProfilingInfo)); + Ov.ProfilingInfo profiling_info = (Ov.ProfilingInfo)tempt; profiling_infos.Add(profiling_info); } + HandleException.handler( + NativeMethods.ov_profiling_info_list_free(ptr)); return profiling_infos; } } From 5629694a788fcf6fffa9b99e0f5ba29afa2d012e Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:32:35 +0800 Subject: [PATCH 31/40] Add exception handling. --- modules/csharp_api/csharp/core/layout.cs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/modules/csharp_api/csharp/core/layout.cs b/modules/csharp_api/csharp/core/layout.cs index f05a28fb9..78166b66f 100644 --- a/modules/csharp_api/csharp/core/layout.cs +++ b/modules/csharp_api/csharp/core/layout.cs @@ -58,11 +58,9 @@ public IntPtr Ptr public Layout(string layout_desc) { sbyte[] c_layout_desc = (sbyte[])((Array)System.Text.Encoding.Default.GetBytes(layout_desc)); - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_layout_create(ref c_layout_desc[0], ref m_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("Layout init error : {0}!", status.ToString()); - } + HandleException.handler( + NativeMethods.ov_layout_create(ref c_layout_desc[0], ref m_ptr)); + } /// From e2c82a5b4b6eb3afd38e5da55a8d41c143510451 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:33:21 +0800 Subject: [PATCH 32/40] Add default param. --- modules/csharp_api/csharp/core/node_output.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/csharp_api/csharp/core/node_output.cs b/modules/csharp_api/csharp/core/node_output.cs index b19251174..2db547190 100644 --- a/modules/csharp_api/csharp/core/node_output.cs +++ b/modules/csharp_api/csharp/core/node_output.cs @@ -24,7 +24,7 @@ public class Output : IDisposable /// /// The node for the output handle. /// The index of the output. - public Output(Node node, ulong index) + public Output(Node node, ulong index=0) { m_node = node; m_index = index; From 7fd8eabd493a8b537ca469aac87189b9a305d15c Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:33:58 +0800 Subject: [PATCH 33/40] Add an indefinite parameter construction method. 
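The params-based constructor lets callers build a Shape directly from integer literals instead of allocating a long[] first, and data_size() returns the element count implied by the shape. A minimal usage sketch (illustrative only; the buffer and the Tensor(Shape, float[]) call are examples, not part of this commit):

    Shape shape = new Shape(1, 3, 640, 640);   // equivalent to new Shape(new long[] { 1, 3, 640, 640 })
    long count = shape.data_size();            // 1 * 3 * 640 * 640 = 1228800 elements
    float[] buffer = new float[count];         // host buffer sized from the shape
    Tensor tensor = new Tensor(shape, buffer); // float[] constructor added earlier in this series (PATCH 29/40)
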
--- modules/csharp_api/csharp/core/shape.cs | 34 +++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/modules/csharp_api/csharp/core/shape.cs b/modules/csharp_api/csharp/core/shape.cs index 9790fda80..bee3b3c73 100644 --- a/modules/csharp_api/csharp/core/shape.cs +++ b/modules/csharp_api/csharp/core/shape.cs @@ -55,7 +55,6 @@ public Shape(IntPtr ptr) /// Initialized list public Shape(List axis_lengths) { - for (int i = 0; i < axis_lengths.Count; ++i) { this.Add(axis_lengths[i]); @@ -73,7 +72,6 @@ public Shape(List axis_lengths) /// Initialized array public Shape(long[] axis_lengths) { - for (int i = 0; i < axis_lengths.Length; ++i) { this.Add(axis_lengths[i]); @@ -86,6 +84,25 @@ public Shape(long[] axis_lengths) shape = (ov_shape)temp; } /// + /// Constructs Shape from the initialized array. + /// + /// Any length parameter + public Shape(params int[] data) + { + long[] axis_lengths = new long[data.Length]; + for (int i = 0; i < data.Length; ++i) + { + this.Add(data[i]); + axis_lengths[i] = data[i]; + } + int l = Marshal.SizeOf(typeof(ov_shape)); + m_ptr = Marshal.AllocHGlobal(l); + HandleException.handler( + NativeMethods.ov_shape_create((long)this.Count, ref axis_lengths[0], m_ptr)); + var temp = Marshal.PtrToStructure(m_ptr, typeof(ov_shape)); + shape = (ov_shape)temp; + } + /// /// Shape's destructor /// ~Shape() @@ -123,5 +140,18 @@ public string to_string() s += "}"; return s; } + /// + /// Obtain the product of all shape parameters + /// + /// The product of all shape parameters + public long data_size() + { + long d = 1; + foreach (var i in this) + { + d *= i; + } + return d; + } } } From b40f64737855b0c9a664e09ba4e4089f77e18a09 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:34:23 +0800 Subject: [PATCH 34/40] Fix method error. --- modules/csharp_api/csharp/core/partial_shape.cs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/csharp_api/csharp/core/partial_shape.cs b/modules/csharp_api/csharp/core/partial_shape.cs index 29df9327d..1fe0accd4 100644 --- a/modules/csharp_api/csharp/core/partial_shape.cs +++ b/modules/csharp_api/csharp/core/partial_shape.cs @@ -82,6 +82,7 @@ public PartialShape(Dimension rank, List dimensions) : this(rank, dim public PartialShape(long rank, long[] dimensions) { this.rank = new Dimension(rank); + this.dimensions = new Dimension[dimensions.Length]; for (int i = 0; i < dimensions.Length; ++i) { this.dimensions[i] = new Dimension(dimensions[i]); @@ -169,7 +170,8 @@ public Dimension[] get_dimensions() { /// The shape. public Shape to_shape() { - IntPtr shape_ptr = IntPtr.Zero; + int l = Marshal.SizeOf(typeof(Ov.ov_shape)); + IntPtr shape_ptr = Marshal.AllocHGlobal(l); HandleException.handler( NativeMethods.ov_partial_shape_to_shape(get_partial_shape(), shape_ptr)); return new Shape(shape_ptr); From b242267adce85cdfa8661bfbec239513cc9bd64e Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:35:07 +0800 Subject: [PATCH 35/40] Add indefinite parameter method interface. 
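The C API accepts property key/value pairs through a variadic argument list, which standard P/Invoke signatures cannot express portably, so each native entry point is declared several times with one explicit IntPtr per key and per value (property_args_size counts these trailing arguments). The managed wrappers marshal each string with Marshal.StringToHGlobalAnsi and pick the overload that matches the dictionary size. An illustrative call path (the model path and property values are placeholders, not part of this commit):

    Core core = new Core();
    Dictionary<string, string> properties = new Dictionary<string, string>
    {
        { "PERFORMANCE_HINT", "LATENCY" },
        { "CACHE_DIR", "./cache" }
    };
    // Two pairs resolve to the ov_core_compile_model_from_file overload with four IntPtr arguments (property_args_size = 4).
    CompiledModel compiled = core.compile_model("model.xml", "CPU", properties);
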
--- .../csharp/native_methods/ov_core.cs | 99 ++++++++++++++++--- 1 file changed, 86 insertions(+), 13 deletions(-) diff --git a/modules/csharp_api/csharp/native_methods/ov_core.cs b/modules/csharp_api/csharp/native_methods/ov_core.cs index 6a67fe411..145c42bb7 100644 --- a/modules/csharp_api/csharp/native_methods/ov_core.cs +++ b/modules/csharp_api/csharp/native_methods/ov_core.cs @@ -119,6 +119,14 @@ public extern static ExceptionStatus ov_core_read_model( ref sbyte bin_path, ref IntPtr model); + [DllImport(dll_extern, EntryPoint = "ov_core_read_model_from_memory_buffer", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_read_model_from_memory_buffer(IntPtr core, + ref byte model_path, + ulong str_size, + IntPtr weights, + ref IntPtr model); + /// /// Reads models from IR / ONNX / PDPD / TF / TFLite formats. /// @@ -138,7 +146,7 @@ public extern static ExceptionStatus ov_core_read_model( [DllImport(dll_extern, EntryPoint = "ov_core_read_model_from_memory", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_core_read_model_from_memory(IntPtr core, - ref sbyte model_path, + ref byte model_path, IntPtr weights, ref IntPtr model); @@ -163,6 +171,38 @@ public extern static ExceptionStatus ov_core_compile_model( ulong property_args_size, ref IntPtr compiled_model); + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model( + IntPtr core, + IntPtr model, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2); + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model( + IntPtr core, + IntPtr model, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2, + IntPtr varg3, IntPtr varg4); + + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model( + IntPtr core, + IntPtr model, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2, + IntPtr varg3, IntPtr varg4, + IntPtr varg5, IntPtr varg6); + /// /// Reads a model and creates a compiled model from the IR/ONNX/PDPD file. 
/// This can be more efficient than using the ov_core_read_model_from_XXX + ov_core_compile_model flow, @@ -183,6 +223,37 @@ public extern static ExceptionStatus ov_core_compile_model_from_file( ref sbyte device_name, ulong property_args_size, ref IntPtr compiled_model); + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_from_file", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_from_file( + IntPtr core, + ref sbyte model_path, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2); + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_from_file", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_from_file( + IntPtr core, + ref sbyte model_path, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2, + IntPtr varg3, IntPtr varg4); + [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_from_file", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_compile_model_from_file( + IntPtr core, + ref sbyte model_path, + ref sbyte device_name, + ulong property_args_size, + ref IntPtr compiled_model, + IntPtr varg1, IntPtr varg2, + IntPtr varg3, IntPtr varg4, + IntPtr varg5, IntPtr varg6); + /// /// Sets properties for a device, acceptable keys can be found in ov_property_key_xxx. @@ -192,13 +263,16 @@ public extern static ExceptionStatus ov_core_compile_model_from_file( /// Status code of the operation: OK(0) for success. [DllImport(dll_extern, EntryPoint = "ov_core_set_property", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public extern static ExceptionStatus ov_core_set_property( - IntPtr core, - ref sbyte device_name); + public static extern ExceptionStatus ov_core_set_property(IntPtr core, + ref sbyte device_name, IntPtr varg1, IntPtr varg2, IntPtr varg3, IntPtr varg4); [DllImport(dll_extern, EntryPoint = "ov_core_set_property", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public static extern ExceptionStatus ov_core_set_property(IntPtr core, + public static extern ExceptionStatus ov_core_set_property(IntPtr core, ref sbyte device_name, IntPtr varg1, IntPtr varg2); + [DllImport(dll_extern, EntryPoint = "ov_core_set_property", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public static extern ExceptionStatus ov_core_set_property(IntPtr core, + ref sbyte device_name, IntPtr varg1, IntPtr varg2, IntPtr varg3, IntPtr varg4, IntPtr varg5, IntPtr varg6); /// /// Gets properties related to device behaviour. /// The method extracts information that can be set via the set_property method. @@ -250,7 +324,7 @@ public extern static ExceptionStatus ov_core_get_available_devices( CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_core_import_model( IntPtr core, - ref sbyte content, + ref byte content, ulong content_size, ref sbyte device_name, ref IntPtr compiled_model); @@ -323,13 +397,12 @@ public extern static ExceptionStatus ov_core_compile_model_with_context( /// /// A pointer to the ov_core_t instance. /// Name of a device to get a default shared context from. - /// A pointer to the referenced remote context. + /// A pointer to the referenced remote context. 
/// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_core_compile_model_with_context", - CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public extern static ExceptionStatus ov_core_compile_model_with_context( - IntPtr core, - ref sbyte device_name, - ref IntPtr context); + [DllImport(dll_extern, EntryPoint = "ov_core_get_default_context", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_core_get_default_context(IntPtr core, ref sbyte device_name, ref IntPtr context); + } + } From e0a86504bb025dce8a0fc64697df4be05523772c Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:35:46 +0800 Subject: [PATCH 36/40] Fix error. --- modules/csharp_api/csharp/native_methods/ov_infer_request.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/csharp_api/csharp/native_methods/ov_infer_request.cs b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs index c8c0de851..1c125c68c 100644 --- a/modules/csharp_api/csharp/native_methods/ov_infer_request.cs +++ b/modules/csharp_api/csharp/native_methods/ov_infer_request.cs @@ -260,7 +260,7 @@ public extern static ExceptionStatus ov_infer_request_infer( /// Status code of the operation: OK(0) for success. [DllImport(dll_extern, EntryPoint = "ov_infer_request_get_profiling_info", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] - public extern static ExceptionStatus ov_infer_request_get_profiling_info(IntPtr infer_request, ref Ov.ov_profiling_info_list profiling_infos); + public extern static ExceptionStatus ov_infer_request_get_profiling_info(IntPtr infer_request, IntPtr profiling_infos); /// /// Release the memory allocated by ov_profiling_info_list_t. From 113490051194d639f1dd751cc9cd53921082171f Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:36:21 +0800 Subject: [PATCH 37/40] Fix interface name error. --- .../native_methods/ov_prepostprocess.cs | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs b/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs index b783084b0..a44ace04a 100644 --- a/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs +++ b/modules/csharp_api/csharp/native_methods/ov_prepostprocess.cs @@ -63,7 +63,7 @@ public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_in /// The order of input. /// A pointer to the ov_preprocess_input_info_t. /// Status code of the operation: OK(0) for success. - [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_free", + [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_get_input_info_by_index", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_info_by_index( IntPtr preprocess, @@ -74,7 +74,7 @@ public extern static ExceptionStatus ov_preprocess_prepostprocessor_get_input_in /// Release the memory allocated by ov_preprocess_input_info_t. /// /// A pointer to the ov_preprocess_input_info_t to free memory. 
- [DllImport(dll_extern, EntryPoint = "ov_preprocess_prepostprocessor_free", + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_info_free", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] public extern static void ov_preprocess_input_info_free( IntPtr preprocess_input_info); @@ -242,8 +242,29 @@ public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_f public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format_with_subname( IntPtr preprocess_input_tensor_info, uint color_format, - ulong sub_names_size); - + ulong sub_names_size, + IntPtr k1); + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_color_format_with_subname", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format_with_subname( + IntPtr preprocess_input_tensor_info, + uint color_format, + ulong sub_names_size, + IntPtr k1, IntPtr k2); + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_color_format_with_subname", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format_with_subname( + IntPtr preprocess_input_tensor_info, + uint color_format, + ulong sub_names_size, + IntPtr k1, IntPtr k2, IntPtr k3); + [DllImport(dll_extern, EntryPoint = "ov_preprocess_input_tensor_info_set_color_format_with_subname", + CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)] + public extern static ExceptionStatus ov_preprocess_input_tensor_info_set_color_format_with_subname( + IntPtr preprocess_input_tensor_info, + uint color_format, + ulong sub_names_size, + IntPtr k1, IntPtr k2, IntPtr k3, IntPtr k4); /// /// Set ov_preprocess_input_tensor_info_t spatial_static_shape. From 545b5d23819c4b10b55d36063e306ebecf647831 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:37:03 +0800 Subject: [PATCH 38/40] Fix error. --- .../csharp/preprocess/input_info.cs | 3 +- .../csharp/preprocess/input_tensor_info.cs | 43 +++++++---- .../csharp/preprocess/output_tensor_info.cs | 4 +- .../csharp/preprocess/preprocess_steps.cs | 76 ++++++------------- 4 files changed, 53 insertions(+), 73 deletions(-) diff --git a/modules/csharp_api/csharp/preprocess/input_info.cs b/modules/csharp_api/csharp/preprocess/input_info.cs index 421e999a4..0ba19993e 100644 --- a/modules/csharp_api/csharp/preprocess/input_info.cs +++ b/modules/csharp_api/csharp/preprocess/input_info.cs @@ -33,8 +33,7 @@ public InputInfo(IntPtr ptr) { if (ptr == IntPtr.Zero) { - System.Diagnostics.Debug.WriteLine("InputInfo init error : ptr is null!"); - return; + throw new OVException(ExceptionStatus.GENERAL_ERROR, "The ptr is null!"); } this.m_ptr = ptr; } diff --git a/modules/csharp_api/csharp/preprocess/input_tensor_info.cs b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs index 0d3a9bcba..6246f9aef 100644 --- a/modules/csharp_api/csharp/preprocess/input_tensor_info.cs +++ b/modules/csharp_api/csharp/preprocess/input_tensor_info.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Runtime.InteropServices; using System.Text; using System.Threading.Tasks; @@ -66,22 +67,34 @@ public void Dispose() /// /// Color format of input image. /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
- public InputTensorInfo set_color_format(ColorFormat format) + public InputTensorInfo set_color_format(ColorFormat format, params string[] properties) { - HandleException.handler( - NativeMethods.ov_preprocess_input_tensor_info_set_color_format(m_ptr, (uint)format)); - return this; - } - /// - /// - /// - /// - /// - /// Reference to 'this' to allow chaining with other calls in a builder-like manner. - public InputTensorInfo set_color_format(ColorFormat format, ulong sub_names_size) - { - HandleException.handler( - NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, sub_names_size)); + IntPtr[] p = new IntPtr[properties.Length]; + for (int i = 0; i < properties.Length; ++i) + { + p[i] = Marshal.StringToHGlobalAnsi(properties[i]); + } + switch (p.Length) + { + case 0: + HandleException.handler(NativeMethods.ov_preprocess_input_tensor_info_set_color_format(m_ptr, (uint)format)); + break; + case 1: + NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, (ulong)properties.Length, p[0]); + break; + case 2: + NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, (ulong)properties.Length, p[0], p[1]); + break; + case 3: + NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, (ulong)properties.Length, p[0], p[1], p[2]); + break; + case 4: + NativeMethods.ov_preprocess_input_tensor_info_set_color_format_with_subname(m_ptr, (uint)format, (ulong)properties.Length, p[0], p[1], p[2], p[3]); + break; + default: + throw new ArgumentOutOfRangeException("Properties count > 4 not supported"); + + } return this; } diff --git a/modules/csharp_api/csharp/preprocess/output_tensor_info.cs b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs index 0366949ce..c49a5e10f 100644 --- a/modules/csharp_api/csharp/preprocess/output_tensor_info.cs +++ b/modules/csharp_api/csharp/preprocess/output_tensor_info.cs @@ -58,10 +58,10 @@ public void Dispose() /// /// Element type for user's output tensor. /// Reference to 'this' to allow chaining with other calls in a builder-like manner. - public OutputTensorInfo set_element_type(ElementType type) + public OutputTensorInfo set_element_type(OvType type) { HandleException.handler( - NativeMethods.ov_preprocess_output_set_element_type(m_ptr, (uint)type)); + NativeMethods.ov_preprocess_output_set_element_type(m_ptr, (uint)type.get_type())); return this; } } diff --git a/modules/csharp_api/csharp/preprocess/preprocess_steps.cs b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs index 3c90e1fee..e820ee0e1 100644 --- a/modules/csharp_api/csharp/preprocess/preprocess_steps.cs +++ b/modules/csharp_api/csharp/preprocess/preprocess_steps.cs @@ -61,12 +61,9 @@ public void Dispose() /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public PreProcessSteps resize(ResizeAlgorithm resize) { - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_resize( - m_ptr, (int)resize); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps resize error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_resize( + m_ptr, (int)resize)); + return this; } @@ -78,12 +75,9 @@ public PreProcessSteps resize(ResizeAlgorithm resize) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
public PreProcessSteps scale(float value) { - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_scale( - m_ptr, value); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps resize error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_scale( + m_ptr, value)); + return this; } @@ -94,12 +88,8 @@ public PreProcessSteps scale(float value) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public PreProcessSteps mean(float value) { - ExceptionStatus status = (ExceptionStatus)NativeMethods.ov_preprocess_preprocess_steps_mean( - m_ptr, value); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps mean error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_mean( + m_ptr, value)); return this; } @@ -115,12 +105,8 @@ public PreProcessSteps mean(float value) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public PreProcessSteps crop(int[] begin, int[] end) { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_crop( - m_ptr, ref begin[0], begin.Length, ref end[0], end.Length); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps crop error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_crop( + m_ptr, ref begin[0], begin.Length, ref end[0], end.Length)); return this; } /// @@ -136,12 +122,8 @@ public PreProcessSteps crop(int[] begin, int[] end) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public PreProcessSteps crop(List begin, List end) { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_crop( - m_ptr, ref begin.ToArray()[0], begin.Count, ref end.ToArray()[0], end.Count); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps crop error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_crop( + m_ptr, ref begin.ToArray()[0], begin.Count, ref end.ToArray()[0], end.Count)); return this; } @@ -166,12 +148,9 @@ public PreProcessSteps crop(List begin, List end) /// public PreProcessSteps convert_layout(Layout layout) { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_layout( - m_ptr, layout.Ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_layout error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_convert_layout( + m_ptr, layout.Ptr)); + return this; } @@ -193,12 +172,8 @@ public PreProcessSteps convert_layout(Layout layout) /// public PreProcessSteps reverse_channels() { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_reverse_channels( - m_ptr); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps reverse_channels error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_reverse_channels( + m_ptr)); return this; } @@ -210,12 +185,8 @@ public PreProcessSteps reverse_channels() /// Reference to 'this' to allow chaining with other calls in a builder-like manner. 
public PreProcessSteps convert_color(ColorFormat format) { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_color( - m_ptr, (uint)format); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_element_type error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_convert_color( + m_ptr, (uint)format)); return this; } @@ -226,12 +197,9 @@ public PreProcessSteps convert_color(ColorFormat format) /// Reference to 'this' to allow chaining with other calls in a builder-like manner. public PreProcessSteps convert_element_type(OvType type) { - ExceptionStatus status = NativeMethods.ov_preprocess_preprocess_steps_convert_element_type( - m_ptr, (uint)type.get_type()); - if (status != 0) - { - System.Diagnostics.Debug.WriteLine("PreProcessSteps convert_element_type error : {0}!", status.ToString()); - } + HandleException.handler(NativeMethods.ov_preprocess_preprocess_steps_convert_element_type( + m_ptr, (uint)type.get_type())); + return this; } } From d8cd17382d935765ed7d8e56e796209ed0f30b71 Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:49:15 +0800 Subject: [PATCH 39/40] Add unit tests. --- modules/csharp_api/csharp/CSharpAPI.sln | 6 + .../tests/csharp_api_unit_tests/Usings.cs | 1 + .../tests/csharp_api_unit_tests/base_test.cs | 92 +++ .../core/CompiledModelTests.cs | 373 ++++++++++++ .../csharp_api_unit_tests/core/CoreTests.cs | 236 ++++++++ .../core/InferRequestTests.cs | 519 +++++++++++++++++ .../csharp_api_unit_tests/core/InputTests.cs | 115 ++++ .../csharp_api_unit_tests/core/ModelTests.cs | 544 ++++++++++++++++++ .../csharp_api_unit_tests/core/NodeTests.cs | 84 +++ .../csharp_api_unit_tests/core/OutputTests.cs | 114 ++++ .../core/PartialShapeTests.cs | 153 +++++ .../csharp_api_unit_tests/core/ShapeTests.cs | 67 +++ .../csharp_api_unit_tests/core/TensorTests.cs | 258 +++++++++ .../csharp_api_unit_tests.csproj | 23 + .../tests/csharp_api_unit_tests/ov/OvTests.cs | 28 + .../preprocess/InputInfoTests.cs | 82 +++ .../preprocess/InputModelInfoTests.cs | 45 ++ .../preprocess/InputTensorInfoTests.cs | 150 +++++ .../preprocess/OutputInfoTests.cs | 44 ++ .../preprocess/OutputTensorInfoTests.cs | 42 ++ .../preprocess/PrePostProcessorTests.cs | 132 +++++ .../preprocess/PreProcessStepsTests.cs | 231 ++++++++ 22 files changed, 3339 insertions(+) create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/Usings.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/base_test.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/CompiledModelTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/CoreTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/InferRequestTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/InputTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/ModelTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/NodeTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/OutputTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/PartialShapeTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/ShapeTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/core/TensorTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/csharp_api_unit_tests.csproj create mode 100644 
modules/csharp_api/tests/csharp_api_unit_tests/ov/OvTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputInfoTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputModelInfoTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputTensorInfoTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputInfoTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputTensorInfoTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PrePostProcessorTests.cs create mode 100644 modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PreProcessStepsTests.cs diff --git a/modules/csharp_api/csharp/CSharpAPI.sln b/modules/csharp_api/csharp/CSharpAPI.sln index 0383d7865..22f660b7f 100644 --- a/modules/csharp_api/csharp/CSharpAPI.sln +++ b/modules/csharp_api/csharp/CSharpAPI.sln @@ -5,6 +5,8 @@ VisualStudioVersion = 17.6.33829.357 MinimumVisualStudioVersion = 10.0.40219.1 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CSharpAPI", "CSharpAPI.csproj", "{56A1269F-3928-4367-84BE-0EA2877DFED1}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "csharp_api_unit_tests", "..\tests\csharp_api_unit_tests\csharp_api_unit_tests.csproj", "{DD506CD5-C670-4354-879C-42EF1A2A7DD5}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -15,6 +17,10 @@ Global {56A1269F-3928-4367-84BE-0EA2877DFED1}.Debug|Any CPU.Build.0 = Debug|Any CPU {56A1269F-3928-4367-84BE-0EA2877DFED1}.Release|Any CPU.ActiveCfg = Release|Any CPU {56A1269F-3928-4367-84BE-0EA2877DFED1}.Release|Any CPU.Build.0 = Release|Any CPU + {DD506CD5-C670-4354-879C-42EF1A2A7DD5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DD506CD5-C670-4354-879C-42EF1A2A7DD5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DD506CD5-C670-4354-879C-42EF1A2A7DD5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DD506CD5-C670-4354-879C-42EF1A2A7DD5}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/Usings.cs b/modules/csharp_api/tests/csharp_api_unit_tests/Usings.cs new file mode 100644 index 000000000..ab67c7ea9 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/Usings.cs @@ -0,0 +1 @@ +global using Microsoft.VisualStudio.TestTools.UnitTesting; \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/base_test.cs b/modules/csharp_api/tests/csharp_api_unit_tests/base_test.cs new file mode 100644 index 000000000..a91d1addf --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/base_test.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + public class OVBaseTest + { + public class TestModelInfo + { + public string model_xml = "..\\..\\..\\..\\..\\tests\\test_data\\model\\yolov8\\yolov8s.xml"; + public string model_bin = "..\\..\\..\\..\\..\\tests\\test_data\\model\\yolov8\\yolov8s.bin"; + public string input_name = "images"; + public string output_name = "output0"; + + public OvType input_type = new OvType(ElementType.F32); + public Shape input_shape = new Shape(new long[] { 1, 3, 640, 640 }); + + public OvType output_type = new OvType(ElementType.F16); + public Shape output_shape = new Shape(new 
long[] { 1, 84, 8400 }); + } + TestModelInfo model_info = new TestModelInfo(); + + private string device = "CPU"; + public string get_model_xml_file_name() + { + if (!File.Exists(model_info.model_xml)) + { + Assert.Fail(); + } + return model_info.model_xml; + } + public string get_model_bin_file_name() + { + if (!File.Exists(model_info.model_bin)) + { + Assert.Fail(); + } + return model_info.model_bin; + } + public string get_device() + { + return device; + } + + public string model_input_name() + { + return model_info.input_name; + } + public string model_output_name() + { + return model_info.output_name; + } + + public Shape model_input_shape() + { + return model_info.input_shape; + } + + public OvType model_input_type() + { + return model_info.input_type; + } + + public Shape model_output_shape() + { + return model_info.output_shape; + } + + public OvType model_output_type() + { + return model_info.output_type; + } + + public byte[] content_from_file(string file) + { + FileStream fs = new FileStream(get_model_bin_file_name(), FileMode.Open, FileAccess.Read); + + long len = fs.Seek(0, SeekOrigin.End); + + + fs.Seek(0, SeekOrigin.Begin); + + byte[] data = new byte[len + 1]; + + fs.Read(data, 0, (int)len); + return data; + } + } +} diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/CompiledModelTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/CompiledModelTests.cs new file mode 100644 index 000000000..7c7195862 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/CompiledModelTests.cs @@ -0,0 +1,373 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class CompiledModelTests : OVBaseTest + { + [TestMethod()] + public void CompiledModel_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void Dispose_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + compiled_model.Dispose(); + Assert.IsTrue(compiled_model.Ptr == IntPtr.Zero); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void create_infer_request_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node input = compiled_model.get_input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); 
+ input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node input = compiled_model.get_input(model_input_name()); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node input = compiled_model.get_input(0); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node output = compiled_model.get_output(); + Assert.IsTrue(output.Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node output = compiled_model.get_output(model_output_name()); + Assert.IsTrue(output.Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Node output = compiled_model.get_output(0); + Assert.IsTrue(output.Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_inputs_size_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + ulong size = compiled_model.get_inputs_size(); + Assert.IsTrue(size > 0); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_outputs_size_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + ulong size = compiled_model.get_outputs_size(); + Assert.IsTrue(size > 0); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != 
IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Input input = compiled_model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Input input = compiled_model.input(model_input_name()); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Input input = compiled_model.input(0); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Output output = compiled_model.output(); + Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Output output = compiled_model.output(model_output_name()); + Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Output output = compiled_model.output(0); + Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero); + output.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void inputs_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + List inputs = compiled_model.inputs(); + Assert.IsTrue(inputs.Count > 0); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void outputs_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + List outputs = compiled_model.outputs(); + Assert.IsTrue(outputs.Count > 0); + 
compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_runtime_model_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + Model runtime = compiled_model.get_runtime_model(); + Assert.IsTrue(runtime.Ptr != IntPtr.Zero); + runtime.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void export_model_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model,get_device()); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + compiled_model.export_model("test_exported_model.blob"); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_property_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model, "BATCH:" + get_device() + "(4)"); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + KeyValuePair key = new KeyValuePair(PropertyKey.AUTO_BATCH_TIMEOUT.ToString(), "5000"); + compiled_model.set_property(key); + string result = compiled_model.get_property("AUTO_BATCH_TIMEOUT"); + Assert.AreEqual("5000", result); + } + + [TestMethod()] + public void get_property_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model, get_device()); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + string result = compiled_model.get_property("SUPPORTED_PROPERTIES"); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_context_test() + { + Assert.Fail(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/CoreTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/CoreTests.cs new file mode 100644 index 000000000..9d3de78d3 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/CoreTests.cs @@ -0,0 +1,236 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class CoreTests : OVBaseTest + { + + + [TestMethod()] + public void Core_test() + { + Core core = new Core(); + Assert.IsTrue(core.Ptr != IntPtr.Zero); + } + + [TestMethod()] + public void Dispose_test() + { + Core core = new Core(); + core.Dispose(); + Assert.IsTrue(core.Ptr == IntPtr.Zero); + } + + [TestMethod()] + public void get_versions_test() + { + var core = new Core(); + KeyValuePair ver = core.get_versions(get_device()); + Assert.IsNotNull(ver.Key, ver.Value.buildNumber, ver.Value.description); + } + + [TestMethod()] + public void read_model_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + model.Dispose(); + model = core.read_model(get_model_xml_file_name(), get_model_bin_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + model.Dispose(); + core.Dispose(); + 
+        }
+
+        [TestMethod()]
+        public void read_model_test1()
+        {
+            byte[] data = content_from_file(get_model_bin_file_name());
+
+            Shape shape = new Shape(new List<long> { 1, data.Length });
+            Tensor tensor = new Tensor(new element.Type(element.Type_t.u8), shape, data);
+
+            Core core = new Core();
+            Assert.IsTrue(core.Ptr != IntPtr.Zero);
+            Model model = core.read_model(get_model_xml_file_name(), tensor);
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+        }
+
+        [TestMethod()]
+        public void compile_model_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            CompiledModel compiled = core.compile_model(model);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            compiled.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void compile_model_test1()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+
+            Dictionary<string, string> latency = new Dictionary<string, string>();
+            latency.Add("PERFORMANCE_HINT", "1");
+
+            CompiledModel compiled = core.compile_model(model, get_device(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            latency.Add("PERFORMANCE", "1");
+            compiled = core.compile_model(get_model_xml_file_name(), get_device(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            compiled.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void compile_model_test2()
+        {
+            var core = new Core();
+            Dictionary<string, string> latency = new Dictionary<string, string>();
+            latency.Add("PERFORMANCE_HINT", "1");
+            CompiledModel compiled = core.compile_model(get_model_xml_file_name(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            latency.Add("PERFORMANCE", "1");
+            compiled = core.compile_model(get_model_xml_file_name(), get_device(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            compiled.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void compile_model_test3()
+        {
+            var core = new Core();
+            Dictionary<string, string> latency = new Dictionary<string, string>();
+            latency.Add("PERFORMANCE_HINT", "1");
+            CompiledModel compiled = core.compile_model(get_model_xml_file_name(), get_device(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            latency.Add("PERFORMANCE", "1");
+            compiled = core.compile_model(get_model_xml_file_name(), get_device(), latency);
+            Assert.IsTrue(compiled.Ptr != IntPtr.Zero);
+            compiled.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void get_available_devices_test()
+        {
+            var core = new Core();
+            List<string> devices = core.get_available_devices();
+            Assert.IsNotNull(devices);
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void set_property_enum_Test()
+        {
+            var core = new Core();
+            KeyValuePair<string, string> key = new KeyValuePair<string, string>(PropertyKey.LOG_LEVEL.ToString(), "WARNING");
+            core.set_property(get_device(), key);
+            core.Dispose();
+        }
+        //[TestMethod()]
+        //public void set_property_invalid_number_property_arguments_Test()
+        //{
+        //    var core = new Core();
+        //    Dictionary<string, string> dict = new Dictionary<string, string>();
+        //    dict.Add(PropertyKey.INFERENCE_NUM_THREADS.ToString(), "12");
+        //    dict.Add(PropertyKey.NUM_STREAMS.ToString(), "7");
+        //    core.set_property(get_device(), dict);
+        //    string s = core.get_property(get_device(), PropertyKey.INFERENCE_NUM_THREADS);
+        //    Assert.AreEqual("12", s);
+        //    s = core.get_property(get_device(), PropertyKey.NUM_STREAMS);
+        //    Assert.AreEqual("7", s);
+        //    core.Dispose();
+        //}
+
+        [TestMethod()]
+        public void set_property_enum_invalid_Test()
+        {
+            var core = new Core();
+            KeyValuePair<string, string> key = new KeyValuePair<string, string>(PropertyKey.PERFORMANCE_HINT.ToString(), "LATENCY");
+            core.set_property(get_device(), key);
+            string s = core.get_property(get_device(), PropertyKey.PERFORMANCE_HINT);
+            Assert.AreEqual("LATENCY", s);
+
+            //key = new KeyValuePair<string, string>(PropertyKey.PERFORMANCE_HINT.ToString(), "LATENCY_TEST");
+            //core.set_property(get_device(), key);
+            //s = core.get_property(get_device(), PropertyKey.PERFORMANCE_HINT);
+            //Assert.AreEqual("LATENCY_TEST", s);
+
+            key = new KeyValuePair<string, string>(PropertyKey.ENABLE_CPU_PINNING.ToString(), "YES");
+            core.set_property(get_device(), key);
+            s = core.get_property(get_device(), PropertyKey.ENABLE_CPU_PINNING);
+            Assert.AreEqual("YES", s);
+
+            //key = new KeyValuePair<string, string>(PropertyKey.ENABLE_CPU_PINNING.ToString(), "INVALID_VAL");
+            //core.set_property(get_device(), key);
+            //s = core.get_property(get_device(), PropertyKey.ENABLE_CPU_PINNING);
+            //Assert.AreEqual("INVALID_VAL", s);
+
+            key = new KeyValuePair<string, string>(PropertyKey.SCHEDULING_CORE_TYPE.ToString(), "PCORE_ONLY");
+            core.set_property(get_device(), key);
+            s = core.get_property(get_device(), PropertyKey.SCHEDULING_CORE_TYPE);
+            Assert.AreEqual("PCORE_ONLY", s);
+
+            //key = new KeyValuePair<string, string>(PropertyKey.SCHEDULING_CORE_TYPE.ToString(), "INVALID_VAL");
+            //core.set_property(get_device(), key);
+            //s = core.get_property(get_device(), PropertyKey.SCHEDULING_CORE_TYPE);
+            //Assert.AreEqual("INVALID_VAL", s);
+
+            key = new KeyValuePair<string, string>(PropertyKey.ENABLE_HYPER_THREADING.ToString(), "YES");
+            core.set_property(get_device(), key);
+            s = core.get_property(get_device(), PropertyKey.ENABLE_HYPER_THREADING);
+            Assert.AreEqual("YES", s);
+
+            //key = new KeyValuePair<string, string>(PropertyKey.ENABLE_HYPER_THREADING.ToString(), "INVALID_VAL");
+            //core.set_property(get_device(), key);
+            //s = core.get_property(get_device(), PropertyKey.ENABLE_HYPER_THREADING);
+            //Assert.AreEqual("INVALID_VAL", s);
+
+            core.Dispose();
+        }
+
+
+
+        [TestMethod()]
+        public void get_propertyTest()
+        {
+            var core = new Core();
+            core.set_property(get_device(), Ov.cache_dir("./model"));
+            string s = core.get_property(get_device(), PropertyKey.CACHE_DIR);
+            Assert.IsNotNull(s);
+        }
+
+        [TestMethod()]
+        public void set_propertyTest1()
+        {
+            var core = new Core();
+            Dictionary<string, string> dict = new Dictionary<string, string>();
+            dict.Add(Ov.cache_dir("./model").Key, Ov.cache_dir("./model").Value);
+            core.set_property(get_device(), dict);
+            string s = core.get_property(get_device(), PropertyKey.CACHE_DIR);
+            Assert.IsNotNull(s);
+        }
+
+        [TestMethod()]
+        public void import_modelTest()
+        {
+            Assert.Fail();
+        }
+    }
+}
diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/InferRequestTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/InferRequestTests.cs
new file mode 100644
index 000000000..71613941d
--- /dev/null
+++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/InferRequestTests.cs
@@ -0,0 +1,519 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using OpenVinoSharp;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace OpenVinoSharp.Tests
+{
+    [TestClass()]
+    public class InferRequestTests : OVBaseTest
+    {
+        [TestMethod()]
+        public void InferRequestTest()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            CompiledModel compiled_model = core.compile_model(model);
+            Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero);
+            InferRequest request =
compiled_model.create_infer_request(); + Assert.IsTrue(request.Ptr != IntPtr.Zero); + request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void DisposeTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest request = compiled_model.create_infer_request(); + Assert.IsTrue(request.Ptr != IntPtr.Zero); + request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + data[1] = 15.62f; + Tensor input_tensor = new Tensor(model_input_shape(), data); + float[] d = input_tensor.get_data((int)model_input_shape().data_size()); + infer_request.set_tensor(model_input_name(), input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Node node = model.get_input(); + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(node, input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_tensorTest2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Input node = model.input(); + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(node, input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_tensorTest3() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Node node = model.get_input(); + Output node_output = new Output(node); + Tensor input_tensor = new 
Tensor(model_input_shape(), data); + infer_request.set_tensor(node_output, input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + [TestMethod()] + public void set_input_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_input_tensor(0, input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_input_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_input_tensor(input_tensor); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_output_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_output_shape().data_size()]; + Tensor output_tensor = new Tensor(model_output_shape(), data); + infer_request.set_output_tensor(0, output_tensor); + output_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_output_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_output_shape().data_size()]; + Tensor output_tensor = new Tensor(model_output_shape(), data); + infer_request.set_output_tensor(output_tensor); + output_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + + Tensor tensor = 
infer_request.get_tensor(model_input_name()); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Node node = model.get_input(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_tensor(node); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + node.Dispose(); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_tensorTest2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Input node = model.input(); + Assert.IsTrue(node.get_node().Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_tensor(node); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + node.Dispose(); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_tensorTest3() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Output node = model.output(); + Assert.IsTrue(node.get_node().Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_tensor(node); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + node.Dispose(); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_input_tensor(0); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_input_tensor(); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + 
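+            // Release the remaining wrappers in reverse order of creation: request, compiled model, model, core.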
infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_tensorTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_output_tensor(0); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_tensorTest1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + Tensor tensor = infer_request.get_output_tensor(); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void inferTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.infer(); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void cancelTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.cancel(); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void start_asyncTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.start_async(); + infer_request.wait(); + + Tensor tensor = 
infer_request.get_output_tensor(); + + tensor.Dispose(); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void waitTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.start_async(); + infer_request.wait(); + + Tensor tensor = infer_request.get_output_tensor(); + + tensor.Dispose(); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void wait_forTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.start_async(); + infer_request.wait_for(1000000); + + Tensor tensor = infer_request.get_output_tensor(); + + tensor.Dispose(); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_profiling_infoTest() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + CompiledModel compiled_model = core.compile_model(model,"CPU"); + Assert.IsTrue(compiled_model.Ptr != IntPtr.Zero); + InferRequest infer_request = compiled_model.create_infer_request(); + Assert.IsTrue(infer_request.Ptr != IntPtr.Zero); + float[] data = new float[model_input_shape().data_size()]; + Tensor input_tensor = new Tensor(model_input_shape(), data); + infer_request.set_tensor(model_input_name(), input_tensor); + infer_request.infer(); + Tensor tensor = infer_request.get_output_tensor(); + float[] data1 = tensor.get_data((int)tensor.get_size()); + List pro = infer_request.get_profiling_info(); + Assert.IsTrue (pro.Count > 0); + input_tensor.Dispose(); + infer_request.Dispose(); + compiled_model.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/InputTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/InputTests.cs new file mode 100644 index 000000000..65fa22069 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/InputTests.cs @@ -0,0 +1,115 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class InputTests : OVBaseTest + { + [TestMethod()] + public void Input_test() + { + } + + [TestMethod()] + public 
void Dispose_test() + { + + } + + [TestMethod()] + public void get_node_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + Node node = input.get_node(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_index_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + ulong index = input.get_index(); + Assert.IsNotNull(index); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + OvType type = input.get_element_type(); + Assert.IsNotNull(type); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + Shape shape = input.get_shape(); + Assert.IsNotNull(shape); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_any_name_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + string name = input.get_any_name(); + Assert.IsNotNull(name); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_partial_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + PartialShape shape = input.get_partial_shape(); + Assert.IsNotNull(shape); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/ModelTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/ModelTests.cs new file mode 100644 index 000000000..9d5d25acf --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/ModelTests.cs @@ -0,0 +1,544 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class ModelTests : OVBaseTest + { + [TestMethod()] + public void Model_test() + { + Assert.IsTrue(true); + } + + [TestMethod()] + public void Dispose_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + model.Dispose(); + Assert.IsTrue(model.Ptr == IntPtr.Zero); + core.Dispose(); + } + + [TestMethod()] + public void get_friendly_name_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr 
!= IntPtr.Zero); + string name = model.get_friendly_name(); + Assert.IsTrue(name != ""); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_input(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_input(0); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_input(model_input_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_output(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_output(0); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_output(model_output_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(0); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(model_input_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_output(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_output_test1() + { + var core = new Core(); + Model model = 
core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + model.Dispose(); + model = core.read_model(get_model_xml_file_name(), + get_model_bin_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_output(0); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_const_output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_output(model_output_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(0); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.input(model_input_name()); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void const_input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.const_input(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void const_input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.const_input(0); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void const_input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Input input = model.const_input(model_input_name()); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output output = model.output(); + Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero); + output.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output output = model.output(0); + Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero); + output.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + 
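+            // output(string) resolves the port by its tensor name; the name and file-path helpers are inherited from OVBaseTest.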
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            Output output = model.output(model_output_name());
+            Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero);
+            output.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void const_output_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            Output output = model.const_output();
+            Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero);
+            output.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void const_output_test1()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            Output output = model.const_output(0);
+            Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero);
+            output.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void const_output_test2()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            Output output = model.const_output(model_output_name());
+            Assert.IsTrue(output.get_node().Ptr != IntPtr.Zero);
+            output.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void get_inputs_size_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_inputs_size();
+            Assert.IsTrue(size > 0);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void get_outputs_size_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_outputs_size();
+            Assert.IsTrue(size > 0);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void inputs_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_inputs_size();
+            Assert.IsTrue(size > 0);
+            List<Input> inputs = model.inputs();
+            Assert.IsTrue(inputs.Count == (int)size);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void outputs_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_outputs_size();
+            Assert.IsTrue(size > 0);
+            List<Output> outputs = model.outputs();
+            Assert.IsTrue(outputs.Count == (int)size);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void const_inputs_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_inputs_size();
+            Assert.IsTrue(size > 0);
+            List<Input> inputs = model.const_inputs();
+            Assert.IsTrue(inputs.Count == (int)size);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void const_outputs_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            ulong size = model.get_outputs_size();
+            Assert.IsTrue(size > 0);
+            List<Output> outputs = model.const_outputs();
+            Assert.IsTrue(outputs.Count == (int)size);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void is_dynamic_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            model.Dispose();
+            model = core.read_model(get_model_xml_file_name(), get_model_bin_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            bool flag = model.is_dynamic();
+            Assert.IsTrue(!flag);
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void reshape_test()
+        {
+            Shape shape = new Shape(new long[4] { 1, 3, 640, 640 });
+
+            PartialShape partial = new PartialShape(shape);
+
+            Dictionary<string, PartialShape> pairs = new Dictionary<string, PartialShape>();
+
+            Assert.IsTrue(partial.get_partial_shape().rank.max == 4);
+            pairs.Add(model_input_name(), partial);
+
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            model.reshape(pairs);
+
+            PartialShape shape1 = model.get_input().get_partial_shape();
+
+            model.Dispose();
+            core.Dispose();
+
+        }
+
+        [TestMethod()]
+        public void reshape_test1()
+        {
+            Shape shape = new Shape(new long[4] { 1, 3, 640, 640 });
+            PartialShape partial = new PartialShape(shape);
+            Assert.IsTrue(partial.get_partial_shape().rank.max == 4);
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            model.reshape(partial);
+            PartialShape shape1 = model.get_input().get_partial_shape();
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void reshape_test2()
+        {
+            Shape shape = new Shape(new long[4] { 1, 3, 640, 640 });
+
+            PartialShape partial = new PartialShape(shape);
+
+            Dictionary<ulong, PartialShape> pairs = new Dictionary<ulong, PartialShape>();
+
+            Assert.IsTrue(partial.get_partial_shape().rank.max == 4);
+            pairs.Add(0, partial);
+
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            model.reshape(pairs);
+
+            PartialShape shape1 = model.get_input().get_partial_shape();
+
+            model.Dispose();
+            core.Dispose();
+        }
+
+        [TestMethod()]
+        public void reshape_test3()
+        {
+            Shape shape = new Shape(new long[4] { 1, 3, 640, 640 });
+
+            PartialShape partial = new PartialShape(shape);
+
+            Dictionary<Node, PartialShape> pairs = new Dictionary<Node, PartialShape>();
+
+            Assert.IsTrue(partial.get_partial_shape().rank.max == 4);
+
+
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+
+            Node input = model.get_input();
+            pairs.Add(input, partial);
+
+            model.reshape(pairs);
+
+            PartialShape shape1 = model.get_input().get_partial_shape();
+
+            model.Dispose();
+            core.Dispose();
+        }
+    }
+}
\ No newline at end of file
diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/NodeTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/NodeTests.cs
new file mode 100644
index 000000000..a9b3d0ab9
--- /dev/null
+++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/NodeTests.cs
@@ -0,0 +1,84 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using OpenVinoSharp;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace OpenVinoSharp.Tests
+{
+    [TestClass()]
+    public class NodeTests : OVBaseTest
+    {
+        [TestMethod()]
+        public void Node_test()
+        {
+        }
+
+        [TestMethod()]
+        public void Dispose_test()
+        {
+        }
+
+        [TestMethod()]
+        public void get_shape_test()
+        {
+            var core = new Core();
+            Model model = core.read_model(get_model_xml_file_name());
+            Assert.IsTrue(model.Ptr != IntPtr.Zero);
+            Node node = model.get_const_input(model_input_name());
+            Assert.IsTrue(node.Ptr != IntPtr.Zero);
+            Shape shape = node.get_shape();
+            Assert.IsNotNull(shape);
+            node.Dispose();
+            model.Dispose();
+            core.Dispose();
+        }
+
[TestMethod()] + public void get_partial_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(model_input_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + PartialShape shape = node.get_partial_shape(); + Assert.IsNotNull(shape); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_name_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(model_input_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + string name = node.get_name(); + Assert.IsNotNull(name); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Node node = model.get_const_input(model_input_name()); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + OvType type = node.get_element_type(); + Assert.IsNotNull(type); + node.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/OutputTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/OutputTests.cs new file mode 100644 index 000000000..ceead7ac6 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/OutputTests.cs @@ -0,0 +1,114 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class OutputTests : OVBaseTest + { + [TestMethod()] + public void Output_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void get_node_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + Node node = input.get_node(); + Assert.IsTrue(node.Ptr != IntPtr.Zero); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_index_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + ulong index = input.get_index(); + Assert.IsNotNull(index); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + OvType type = input.get_element_type(); + Assert.IsNotNull(type); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + Shape shape = input.get_shape(); + Assert.IsNotNull(shape); + input.Dispose(); + model.Dispose(); + core.Dispose(); 
+ } + + [TestMethod()] + public void get_any_name_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + string name = input.get_any_name(); + Assert.IsNotNull(name); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void get_partial_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + Output input = model.output(); + Assert.IsTrue(input.get_node().Ptr != IntPtr.Zero); + PartialShape shape = input.get_partial_shape(); + Assert.IsNotNull(shape); + input.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/PartialShapeTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/PartialShapeTests.cs new file mode 100644 index 000000000..ab9945ae6 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/PartialShapeTests.cs @@ -0,0 +1,153 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class PartialShapeTests + { + [TestMethod()] + public void PartialShape_test() + { + } + + [TestMethod()] + public void PartialShape_test1() + { + Dimension[] dimensions = new Dimension[] { new Dimension(10), new Dimension(10), new Dimension(10) }; + PartialShape shape = new PartialShape(dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test2() + { + List dimensions = new List { new Dimension(10), new Dimension(10), new Dimension(10) }; + PartialShape shape = new PartialShape(dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test3() + { + Dimension rank = new Dimension(3); + Dimension[] dimensions = new Dimension[] { new Dimension(10), new Dimension(10), new Dimension(10) }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test4() + { + Dimension rank = new Dimension(3); + List dimensions = new List { new Dimension(10), new Dimension(10), new Dimension(10) }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test5() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test6() + { + long rank = 3; + List dimensions = new List { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + } + + [TestMethod()] + public void PartialShape_test7() + { + Shape shape = new Shape(1,3,9); + PartialShape shape1 = new PartialShape(shape); + Assert.IsNotNull(shape1); + } + + [TestMethod()] + public void get_partial_shape_test() + { + Shape shape = new Shape(1, 3, 9); + PartialShape shape1 = new PartialShape(shape); + Assert.IsNotNull(shape1); + Ov.ov_partial_shape ov_partial = shape1.get_partial_shape(); + Assert.IsNotNull(ov_partial); + } + + [TestMethod()] + public void get_rank_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + 
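+            // A rank of 3 with three concrete dimensions describes a fully static partial shape.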
PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + Dimension dimension = shape.get_rank(); + Assert.IsNotNull(dimension); + } + + [TestMethod()] + public void get_dimensions_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + Dimension[] dimension = shape.get_dimensions(); + Assert.IsNotNull(dimension); + } + + [TestMethod()] + public void to_shape_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + Shape shape1 = shape.to_shape(); + Assert.IsNotNull(shape1); + } + + [TestMethod()] + public void is_static_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + shape.is_static(); + } + + [TestMethod()] + public void is_dynamic_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + shape.is_dynamic(); + } + + [TestMethod()] + public void to_string_test() + { + long rank = 3; + long[] dimensions = new long[] { 10, 10, 10 }; + PartialShape shape = new PartialShape(rank, dimensions); + Assert.IsNotNull(shape); + string msg = shape.to_string(); + Assert.IsNotNull(msg); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/ShapeTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/ShapeTests.cs new file mode 100644 index 000000000..a44027e51 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/ShapeTests.cs @@ -0,0 +1,67 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class ShapeTests + { + [TestMethod()] + public void Shape_test() + { + } + + [TestMethod()] + public void Shape_test1() + { + List data = new List() { 1, 2, 3 }; + Shape shape = new Shape(data); + shape.Dispose(); + } + + [TestMethod()] + public void Shape_test2() + { + long[] data = new long[] { 1, 2, 3 }; + Shape shape = new Shape(data); + shape.Dispose(); + } + + [TestMethod()] + public void Shape_test3() + { + Shape shape = new Shape(1,2,9); + shape.Dispose(); + } + + [TestMethod()] + public void Dispose_test() + { + Shape shape = new Shape(1, 2, 9); + shape.Dispose(); + } + + [TestMethod()] + public void to_string_test() + { + Shape shape = new Shape(1, 2, 9); + string msg = shape.to_string(); + Assert.IsNotNull(msg); + shape.Dispose(); + } + + [TestMethod()] + public void data_size_test() + { + Shape shape = new Shape(1, 2, 9); + long size = shape.data_size(); + Assert.IsTrue(size == 18); + shape.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/core/TensorTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/core/TensorTests.cs new file mode 100644 index 000000000..e0d021d7b --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/core/TensorTests.cs @@ -0,0 +1,258 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; 
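+// Tensor wrapper tests: construction from typed arrays and raw pointers, shape and element-type queries, and data copy helpers.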
+ +namespace OpenVinoSharp.Tests +{ + [TestClass()] + public class TensorTests + { + [TestMethod()] + public void Tensor_test() + { + } + + [TestMethod()] + public void Tensor_test1() + { + } + + [TestMethod()] + public void Tensor_test2() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test3() + { + Shape shape = new Shape(1, 2, 3); + double[] data = new double[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test4() + { + Shape shape = new Shape(1, 2, 3); + int[] data = new int[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test5() + { + Shape shape = new Shape(1, 2, 3); + short[] data = new short[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test6() + { + Shape shape = new Shape(1, 2, 3); + long[] data = new long[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test7() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(new element.Type(ElementType.F32), shape, Marshal.UnsafeAddrOfPinnedArrayElement(data, 0)); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test8() + { + Shape shape = new Shape(1, 2, 3); + Tensor tensor = new Tensor(new element.Type(ElementType.F32), shape); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test9() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + Tensor tensor1 = new Tensor(tensor); + Assert.IsTrue(tensor1.Ptr != IntPtr.Zero); + tensor.Dispose(); + tensor1.Dispose(); + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void set_shape_test() + { + Shape shape = new Shape(1, 2, 80); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + Shape new_shape = new Shape(1, 2, 15); + tensor.set_shape(new_shape); + tensor.Dispose(); + } + + [TestMethod()] + public void get_shape_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + Shape new_shape = tensor.get_shape(); + Assert.IsTrue(shape.Count == new_shape.Count); + tensor.Dispose(); + } + + [TestMethod()] + public void get_element_type_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + OvType type = tensor.get_element_type(); + Assert.IsTrue((int)type.get_type()!=100); + tensor.Dispose(); + } + + [TestMethod()] + public void get_size_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + ulong size = tensor.get_size(); + Assert.IsTrue(size > 0); + tensor.Dispose(); + } + + [TestMethod()] + public void 
get_byte_size_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + ulong size = tensor.get_byte_size(); + Assert.IsTrue(size > 0); + tensor.Dispose(); + } + + [TestMethod()] + public void copy_to_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + data[0] = 0.6f; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + Tensor new_tensor = new Tensor(shape, data); + tensor.copy_to(new_tensor); + float[] new_data = new_tensor.get_data((int)new_tensor.get_size()); + Assert.IsTrue(new_data[0] == 0.6f); + new_tensor.Dispose(); + tensor.Dispose(); + } + + [TestMethod()] + public void data_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + data[0] = 0.6f; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + IntPtr ptr = tensor.data(); + Assert.IsTrue(ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void set_data_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + data[0] = 0.6f; + Tensor tensor = new Tensor(new OvType(ElementType.F32), shape); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.set_data(data); + float[] new_data = tensor.get_data((int)tensor.get_size()); + Assert.IsTrue(new_data[0] == 0.6f); + tensor.Dispose(); + } + + [TestMethod()] + public void get_data_test() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + data[0] = 0.6f; + Tensor tensor = new Tensor(shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + float[] new_data = tensor.get_data((int)tensor.get_size()); + Assert.IsTrue(new_data[0] == 0.6f); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test10() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(new OvType(ElementType.F32), shape, Marshal.UnsafeAddrOfPinnedArrayElement(data, 0)); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test11() + { + Shape shape = new Shape(1, 2, 3); + float[] data = new float[6]; + Tensor tensor = new Tensor(new OvType(ElementType.F32), shape); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + + [TestMethod()] + public void Tensor_test12() + { + Shape shape = new Shape(1, 2, 3); + byte[] data = new byte[6]; + Tensor tensor = new Tensor(new OvType(ElementType.F32), shape, data); + Assert.IsTrue(tensor.Ptr != IntPtr.Zero); + tensor.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/csharp_api_unit_tests.csproj b/modules/csharp_api/tests/csharp_api_unit_tests/csharp_api_unit_tests.csproj new file mode 100644 index 000000000..9c55d5ee4 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/csharp_api_unit_tests.csproj @@ -0,0 +1,23 @@ + + + + net6.0 + enable + enable + + false + true + + + + + + + + + + + + + + diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/ov/OvTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/ov/OvTests.cs new file mode 100644 index 000000000..0700a4721 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/ov/OvTests.cs @@ -0,0 +1,28 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.Tests +{ 
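+    // Tests for the static Ov helpers: querying the linked OpenVINO version and reading raw model weights from a file.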
+ [TestClass()] + public class OvTests : OVBaseTest + { + [TestMethod()] + public void get_openvino_version_test() + { + Version version = Ov.get_openvino_version(); + Assert.IsNotNull(version); + } + + [TestMethod()] + public void content_from_file_test() + { + byte[] data = Ov.content_from_file(get_model_bin_file_name()); + Assert.IsTrue(data.Length>0); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputInfoTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputInfoTests.cs new file mode 100644 index 000000000..a267e8de9 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputInfoTests.cs @@ -0,0 +1,82 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class InputInfoTests : OVBaseTest + { + [TestMethod()] + public void InputInfo_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void tensor_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void preprocess_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void model_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputModelInfo model_info = input.model(); + Assert.IsTrue(model_info.Ptr != IntPtr.Zero); + model_info.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputModelInfoTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputModelInfoTests.cs new file mode 100644 index 000000000..4c2df4aea --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputModelInfoTests.cs @@ -0,0 +1,45 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace 
OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class InputModelInfoTests : OVBaseTest + { + [TestMethod()] + public void InputModelInfo_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void set_layout_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputModelInfo model_info = input.model(); + Assert.IsTrue(model_info.Ptr != IntPtr.Zero); + model_info.set_layout(new Layout("NCHW")); + model_info.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputTensorInfoTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputTensorInfoTests.cs new file mode 100644 index 000000000..8560208c6 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/InputTensorInfoTests.cs @@ -0,0 +1,150 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class InputTensorInfoTests : OVBaseTest + { + [TestMethod()] + public void InputTensorInfo_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void set_color_format_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + input_tensor.set_color_format(ColorFormat.NV12_SINGLE_PLANE); + input_tensor.set_color_format(ColorFormat.NV12_TWO_PLANES, "y", "uv"); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + input_tensor.set_element_type(new OvType(ElementType.F32)); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_spatial_static_shape_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + 
input_tensor.set_spatial_static_shape(100, 100); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_memory_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + input_tensor.set_memory_type("GPU_SURFACE"); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_layout_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + input_tensor.set_layout(new Layout("NCHW")); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void set_from_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + InputTensorInfo input_tensor = input.tensor(); + Assert.IsTrue(input_tensor.Ptr != IntPtr.Zero); + + Shape shape = new Shape(1, 2, 3); + Tensor tensor = new Tensor(new element.Type(ElementType.F32), shape); + + input_tensor.set_from(tensor); + input_tensor.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputInfoTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputInfoTests.cs new file mode 100644 index 000000000..85c585c9a --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputInfoTests.cs @@ -0,0 +1,44 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class OutputInfoTests : OVBaseTest + { + [TestMethod()] + public void OutputInfo_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void tensor_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + OutputInfo output = processor.output(); + Assert.IsTrue(output.Ptr != IntPtr.Zero); + OutputTensorInfo tensor_info = output.tensor(); + Assert.IsTrue(tensor_info.Ptr != IntPtr.Zero); + tensor_info.Dispose(); + output.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ 
No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputTensorInfoTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputTensorInfoTests.cs new file mode 100644 index 000000000..06950c132 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/OutputTensorInfoTests.cs @@ -0,0 +1,42 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class OutputTensorInfoTests : OVBaseTest + { + [TestMethod()] + public void OutputTensorInfo_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void set_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + OutputInfo output = processor.output(); + Assert.IsNotNull(output); + OutputTensorInfo output_tensor = output.tensor(); + Assert.IsNotNull(output_tensor); + output_tensor.set_element_type(new OvType(ElementType.F32)); + output.Dispose(); + processor.Dispose(); + model.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PrePostProcessorTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PrePostProcessorTests.cs new file mode 100644 index 000000000..a04b3ef47 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PrePostProcessorTests.cs @@ -0,0 +1,132 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class PrePostProcessorTests : OVBaseTest + { + [TestMethod()] + public void PrePostProcessor_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void input_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + InputInfo input = processor.input(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void input_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + InputInfo input = processor.input(model_input_name()); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void input_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + InputInfo input = processor.input(0); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void 
output_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + OutputInfo input = processor.output(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void output_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + OutputInfo input = processor.output(model_output_name()); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void output_test2() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + OutputInfo input = processor.output(0); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + } + + [TestMethod()] + public void build_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + + Tensor input_tensor = new Tensor(new OvType(ElementType.U8), new Shape(1, 640, 640, 3)); + InputInfo input_info = processor.input(0); + InputTensorInfo input_tensor_info = input_info.tensor(); + input_tensor_info.set_from(input_tensor).set_layout(new Layout("NHWC")).set_color_format(ColorFormat.BGR); + + PreProcessSteps process_steps = input_info.preprocess(); + process_steps.convert_color(ColorFormat.RGB).resize(ResizeAlgorithm.RESIZE_LINEAR) + .convert_element_type(new OvType(ElementType.F32)).scale(255.0f).convert_layout(new Layout("NCHW")); + + Model new_model = processor.build(); + new_model.Dispose(); + processor.Dispose(); + model.Dispose(); + } + } +} \ No newline at end of file diff --git a/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PreProcessStepsTests.cs b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PreProcessStepsTests.cs new file mode 100644 index 000000000..a51bde300 --- /dev/null +++ b/modules/csharp_api/tests/csharp_api_unit_tests/preprocess/PreProcessStepsTests.cs @@ -0,0 +1,231 @@ +using Microsoft.VisualStudio.TestTools.UnitTesting; +using OpenVinoSharp.preprocess; +using OpenVinoSharp.Tests; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace OpenVinoSharp.preprocess.Tests +{ + [TestClass()] + public class PreProcessStepsTests:OVBaseTest + { + [TestMethod()] + public void PreProcessSteps_test() + { + } + + [TestMethod()] + public void Dispose_test() + { + } + + [TestMethod()] + public void resize_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.resize(ResizeAlgorithm.RESIZE_LINEAR); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void scale_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + 
Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.scale(0.5f); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void mean_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.mean(0.5f); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void crop_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + + int[] begin = { 0, 0, 5, 10 }; + int[] end = { 1, 3, 15, 20 }; + + process_steps.crop(begin, end); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void crop_test1() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + + List begin = new List { 0, 0, 5, 10 }; + List end = new List{ 1, 3, 15, 20 }; + + process_steps.crop(begin, end); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void convert_layout_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.convert_layout(new Layout("NCHW")); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void reverse_channels_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = 
input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.reverse_channels(); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void convert_color_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.convert_color(ColorFormat.RGB); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + + [TestMethod()] + public void convert_element_type_test() + { + var core = new Core(); + Model model = core.read_model(get_model_xml_file_name()); + Assert.IsTrue(model.Ptr != IntPtr.Zero); + PrePostProcessor processor = new PrePostProcessor(model); + Assert.IsTrue(processor.Ptr != IntPtr.Zero); + InputInfo input = processor.input(); + Assert.IsTrue(input.Ptr != IntPtr.Zero); + PreProcessSteps process_steps = input.preprocess(); + Assert.IsTrue(process_steps.Ptr != IntPtr.Zero); + + process_steps.convert_element_type(new OvType(ElementType.F32)); + + process_steps.Dispose(); + input.Dispose(); + processor.Dispose(); + model.Dispose(); + core.Dispose(); + } + } +} \ No newline at end of file From 0384be9ce847003274c7ab8e530c13b34f6fba3f Mon Sep 17 00:00:00 2001 From: yanguojin Date: Thu, 18 Jan 2024 17:54:40 +0800 Subject: [PATCH 40/40] Update readme. --- modules/csharp_api/README.md | 68 +++++++++++++-------------- modules/csharp_api/README_cn.md | 83 +++++++++++++++------------------ 2 files changed, 70 insertions(+), 81 deletions(-) diff --git a/modules/csharp_api/README.md b/modules/csharp_api/README.md index a1e8a08a5..13d1a71b9 100644 --- a/modules/csharp_api/README.md +++ b/modules/csharp_api/README.md @@ -5,14 +5,12 @@ - +

[简体中文](README_cn.md) | English -## This is OpenVINO ™ C # API, this project is still under construction and its functions are not yet fully developed. If you have any problems using it, please feel free to communicate with me. If you are interested in this project, you can also join our development.🥰🥰🥰🥰🥰 - ## 📚 What is OpenVINO™ C# API ? [OpenVINO™](www.openvino.ai) is an open-source toolkit for optimizing and deploying AI inference. @@ -25,14 +23,16 @@ ## NuGet Package -### Managed libraries +### Core Managed Libraries -| Package | Description | Link | -| --------------------------- | ------------------------------ | ------------------------------------------------------------ | -| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | -| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) | +| Package | Description | Link | +| ---------------------------------------------- | --------------------------------------------------------- | ------------------------------------------------------------ | +| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | +| **OpenVINO.CSharp.API.Extensions** | OpenVINO C# API core extensions libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions/) | +| **OpenVINO.CSharp.API.Extensions.OpenCvSharp** | OpenVINO C# API core extensions libraries use OpenCvSharp | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.OpenCvSharp.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions.OpenCvSharp/) | +| **OpenVINO.CSharp.API.Extensions.EmguCV** | OpenVINO C# API core extensions libraries use EmguCV | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.EmguCV.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions.EmguCV/) | -### Native bindings +### Native Runtime Libraries | Package | Description | Link | | ------------------------------------- | ------------------------------------ | ------------------------------------------------------------ | @@ -41,7 +41,19 @@ | **OpenVINO.runtime.ubuntu.20-x86_64** | Native bindings for ubuntu.20-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.20-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.20-x86_64/) | | **OpenVINO.runtime.ubuntu.18-x86_64** | Native bindings for ubuntu.18-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.18-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.18-x86_64/) | | **OpenVINO.runtime.debian9-arm64** | Native bindings for debian9-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.debian9-armhf ** | Native bindings for debian9-armhf | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.debian9-armhf.svg)](https://www.nuget.org/packages/OpenVINO.runtime.debian9-armhf/) | | **OpenVINO.runtime.centos7-x86_64** | Native bindings for centos7-x86_64 | [![NuGet Gallery 
](https://badge.fury.io/nu/OpenVINO.runtime.centos7-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.centos7-x86_64/) |
+| **OpenVINO.runtime.rhel8-x86_64** | Native bindings for rhel8-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.rhel8-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.rhel8-x86_64/) |
+| **OpenVINO.runtime.macos-x86_64** | Native bindings for macos-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.macos-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.macos-x86_64/) |
+| **OpenVINO.runtime.macos-arm64** | Native bindings for macos-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.macos-arm64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.macos-arm64/) |
+
+### Integration Library
+
+| Package | Description | Link |
+| --------------------------- | ------------------------------ | ------------------------------------------------------------ |
+| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) |
+
+
 
 ## ⚙ How to install OpenVINO™ C# API?
 
@@ -81,10 +93,19 @@ export LD_LIBRARY_PATH=/home/ygj/Program/sample1/bin/Debug/net6.0/runtimes/ubunt
 sudo -E ./install_openvino_dependencies.sh
 ```
 
+## Mac OS
+
+Install the following packages using the ``dotnet add package`` command
+
+```shell
+dotnet add package OpenVINO.CSharp.API
+dotnet add package OpenVINO.runtime.macos-arm64
+```
+
 ## 🏷How to use OpenVINO™ C# API?
 
-- **Quick start**
-  - [Deploying the Yolov8 full series model using OpenVINO™ C# API](demos/yolov8/README.md)
 
 - **Simple usage**
 
 If you don't know how to use it, simply understand the usage method through the following code.
 
@@ -113,35 +134,12 @@ The classes and objects encapsulated in the project, such as Core, Model, Tensor
 
 ## 💻 Tutorial Examples
 
-- [Using OpenVINO™ C# API to Deploy the Yolov8 Model on the AIxBoard](tutorial_examples/AlxBoard_deploy_yolov8/README.md)
-- [Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API](tutorial_examples\PP-Human_Fall_Detection\README.md)
-- [Deploying RT-DETR based on OpenVINO](https://github.com/guojin-yan/RT-DETR-OpenVINO)
+
 
 ## 🗂 API Reference
 
 If you want to learn more information, you can refer to: [OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/index.html)
 
-## 🔃 Update log
-
-#### 🔥 **2023.10.22 :Update OpenVINO™ C# API **
-
-- 🗳 **OpenVINO™ C# API :**
-  - Modify OpenVINO™ errors in the C # API, and integration of code sections to add exception handling mechanisms.
-- 🛹**Application Cases:**
-  - Pedestrian fall detection - Deploying PP-Human based on OpenVINO C # API
-  - Deploying RT-DETR based on OpenVINO
-- 🔮 **NuGet:**
-  - Abolish the previously released NuGet package, release updated installation packages, and release three types of NuGet packages, including **OpenVINO. CSharp. API **: core code package, **OpenVINO. CSharp. Windows **: Windows platform integration package, and **OpenVINO. runtime. win **: Windows platform runtime package.
-
-#### **2023.6.19 : release OpenVINO™ C# API 3.0**
-
-- 🗳OpenVINO™ C# API :
-  - Upgrade OpenVINO™ C# API 2.0 to OpenVINO™ C# API 3.0, changing from refactoring the C++API to directly reading OpenVino ™ The official C API makes the application more flexible and supports a richer range of functions.
-- 🛹Application Cases:
-  - OpenVINO™ C# API Deployment Yolov8 Model Example。
-- 🔮NuGet:
-  - Create and publish NuGet package, release * * OpenVINO™ C# API. win 3.0.120 * *, including OpenVino 2023.0 dependencies.
 
 ## 🎖 Contribute
 
   If you are interested in using OpenVINO™ with C# and would like to contribute to the open source community, you are welcome to join us and develop OpenVINO™ C# API together.
diff --git a/modules/csharp_api/README_cn.md b/modules/csharp_api/README_cn.md
index 8fc740a0f..010b9a034 100644
--- a/modules/csharp_api/README_cn.md
+++ b/modules/csharp_api/README_cn.md
@@ -5,16 +5,14 @@
-
+
-
-简体中文| [English](README.md)
-
-## 这是OpenVINO™ C# API，该项目还在建设中，功能还未完善，如使用中有问题，欢迎与我沟通联系。如果对该项目感兴趣，也可以加入到我们的开发中来。🥰🥰🥰🥰
-

+简体中文| [English](README.md) -## 📚 简介 +# 📚 简介 [OpenVINO™ ](www.openvino.ai)是一个用于优化和部署 AI 推理的开源工具包。 @@ -24,16 +22,18 @@   该项目基于OpenVINO™工具套件推出了 OpenVINO™ C# API,旨在推动 OpenVINO™在C#领域的应用。OpenVINO™ C# API 由于是基于 OpenVINO™ 开发,所支持的平台与OpenVINO™ 一致,具体信息可以参考 OpenVINO™。 -## NuGet 包 +# NuGet Package -### Managed libraries +## Core Managed Libraries -| Package | Description | Link | -| --------------------------- | ------------------------------ | ------------------------------------------------------------ | -| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | -| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) | +| Package | Description | Link | +| ---------------------------------------------- | --------------------------------------------------------- | ------------------------------------------------------------ | +| **OpenVINO.CSharp.API** | OpenVINO C# API core libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API/) | +| **OpenVINO.CSharp.API.Extensions** | OpenVINO C# API core extensions libraries | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions/) | +| **OpenVINO.CSharp.API.Extensions.OpenCvSharp** | OpenVINO C# API core extensions libraries use OpenCvSharp | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.OpenCvSharp.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions.OpenCvSharp/) | +| **OpenVINO.CSharp.API.Extensions.EmguCV** | OpenVINO C# API core extensions libraries use EmguCV | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.API.Extensions.EmguCV.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.API.Extensions.EmguCV/) | -### Native bindings +## Native Runtime Libraries | Package | Description | Link | | ------------------------------------- | ------------------------------------ | ------------------------------------------------------------ | @@ -42,26 +42,35 @@ | **OpenVINO.runtime.ubuntu.20-x86_64** | Native bindings for ubuntu.20-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.20-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.20-x86_64/) | | **OpenVINO.runtime.ubuntu.18-x86_64** | Native bindings for ubuntu.18-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.ubuntu.18-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.ubuntu.18-x86_64/) | | **OpenVINO.runtime.debian9-arm64** | Native bindings for debian9-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.win.svg)](https://www.nuget.org/packages/OpenVINO.runtime.win/) | +| **OpenVINO.runtime.debian9-armhf ** | Native bindings for debian9-armhf | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.debian9-armhf.svg)](https://www.nuget.org/packages/OpenVINO.runtime.debian9-armhf/) | | **OpenVINO.runtime.centos7-x86_64** | Native bindings for centos7-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.centos7-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.centos7-x86_64/) | +| **OpenVINO.runtime.rhel8-x86_64** | Native bindings for rhel8-x86_64 | [![NuGet Gallery 
](https://badge.fury.io/nu/OpenVINO.runtime.rhel8-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.rhel8-x86_64/) | +| **OpenVINO.runtime.macos-x86_64** | Native bindings for macos-x86_64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.macos-x86_64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.macos-x86_64/) | +| **OpenVINO.runtime.macos-arm64** | Native bindings for macos-arm64 | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.runtime.macos-arm64.svg)](https://www.nuget.org/packages/OpenVINO.runtime.macos-arm64/) | +## Integration Library -## ⚙ 如何安装 +| Package | Description | Link | +| --------------------------- | ------------------------------ | ------------------------------------------------------------ | +| **OpenVINO.CSharp.Windows** | All-in-one package for Windows | [![NuGet Gallery ](https://badge.fury.io/nu/OpenVINO.CSharp.Windows.svg)](https://www.nuget.org/packages/OpenVINO.CSharp.Windows/) | + +# ⚙ 如何安装 以下提供了OpenVINO™ C# API在不同平台的安装方法,可以根据自己使用平台进行安装。 -### **Windows** +## Windows 通过``dotnet add package``指令安装或通过Visual Studio安装以下程序包 ```shell dotnet add package OpenVINO.CSharp.API dotnet add package OpenVINO.runtime.win -或者安装集成包=》 +或者安装集成包——> dotnet add package OpenVINO.CSharp.Windows ``` -### **Linux** +## Linux    **linux**平台我们根据官方编译的平台制作了对应的NuGet Package,以**ubuntu.22-x86_64**为例,通过``dotnet add package``指令安装: @@ -72,9 +81,9 @@ dotnet add package OpenVINO.runtime.ubuntu.22-x86_64   运行一次程序后,添加环境变量: -``` +```shell export LD_LIBRARY_PATH={Program generated executable file directory}/runtimes/ubuntu.22-x86_64/native -例如=》 +例如——> export LD_LIBRARY_PATH=/home/ygj/Program/sample1/bin/Debug/net6.0/runtimes/ubuntu.22-x86_64/native ``` @@ -84,11 +93,16 @@ export LD_LIBRARY_PATH=/home/ygj/Program/sample1/bin/Debug/net6.0/runtimes/ubunt sudo -E ./install_openvino_dependencies.sh ``` -## 🏷开始使用 +## Mac OS + +通过``dotnet add package``指令安装以下程序包 -- **快速体验** +```shell +dotnet add package OpenVINO.CSharp.API +dotnet add package OpenVINO.runtime.macos-arm64 +``` - [使用OpenVINO™ C# API部署Yolov8全系列模型](demos/yolov8/README_cn.md) +## 🏷开始使用 - **使用方法** @@ -118,35 +132,12 @@ namespace test ## 💻 应用案例 -- [爱克斯开发板使用OpenVINO™ C# API部署Yolov8模型](tutorial_examples/AlxBoard_deploy_yolov8/README_cn.md) -- [行人摔倒检测 — 基于 OpenVINO C# API 部署PP-Human](tutorial_examples\PP-Human_Fall_Detection\README_cn.md) -- [基于 OpenVINO 部署 RT-DETR](https://github.com/guojin-yan/RT-DETR-OpenVINO) + ## 🗂 API 文档 如果想了解更多信息,可以参阅:[OpenVINO™ C# API API Documented](https://guojin-yan.github.io/OpenVINO-CSharp-API.docs/index.html) -## 🔃 更新日志 - -#### 🔥 **2023.10.22 :更新OpenVINO™ C# API ** - -- 🗳 **OpenVINO™ C# API 库:** - - 修改OpenVINO™ C# API 中的错误,并对代码板块进行整合,添加异常处理机制。 -- 🛹**应用案例:** - - 行人摔倒检测 — 基于 OpenVINO C# API 部署PP-Human - - 基于 OpenVINO 部署 RT-DETR -- 🔮 **NuGet包:** - - 废除之前发布的NuGet包,发布更新新的安装包,发布三类NuGet包,包括**OpenVINO.CSharp.API**:核心代码包,**OpenVINO.CSharp.Windows**:Windows平台整合包、**OpenVINO.runtime.win**:Windows平台运行库包。 - -#### **2023.6.19 :发布 OpenVINO™ C# API 3.0** - -- 🗳 **OpenVINO™ C# API 库:** - - 升级OpenVINO™ C# API 2.0 到 OpenVINO™ C# API 3.0 版本,由原来的重构 C++ API 改为直接读取 OpenVINO™ 官方 C API,使得应用更加灵活,所支持的功能更加丰富。 -- 🛹**应用案例:** - - OpenVINO™ C# API部署Yolov8模型实例。 -- 🔮 **NuGet包:** - - 制作并发布NuGet包,发布**OpenVINO™ C# API.win 3.0.120** ,包含OpenVINO 2023.0 依赖项。 - ## 🎖 贡献   如果您对OpenVINO™ 在C#使用感兴趣,有兴趣对开源社区做出自己的贡献,欢迎加入我们,一起开发OpenVINO™ C# API。
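
The "Simple usage" samples referenced in both READMEs sit outside the hunks of this patch. For orientation, a minimal end-to-end sketch of that flow is shown below. It assumes the snake_case `CompiledModel`/`InferRequest` wrappers added earlier in this series expose `compile_model`, `create_infer_request`, `infer`, `get_input_tensor` and `get_output_tensor`; the model path, device name and input values are placeholders rather than values taken from this patch.

```csharp
using System;
using OpenVinoSharp;

namespace test
{
    internal class Program
    {
        static void Main(string[] args)
        {
            // Placeholder model path and device name; replace with a real IR file and target device.
            Core core = new Core();
            Model model = core.read_model("./model.xml");
            CompiledModel compiled_model = core.compile_model(model, "AUTO");
            InferRequest infer_request = compiled_model.create_infer_request();

            // Fill the input tensor with preprocessed data before running inference.
            Tensor input_tensor = infer_request.get_input_tensor();
            float[] input_data = new float[input_tensor.get_size()];
            input_tensor.set_data(input_data);

            infer_request.infer();

            Tensor output_tensor = infer_request.get_output_tensor();
            Console.WriteLine("output byte size: " + output_tensor.get_byte_size());

            // Release the native handles explicitly, mirroring the Dispose() pattern
            // used throughout the unit tests in this patch.
            output_tensor.Dispose();
            input_tensor.Dispose();
            infer_request.Dispose();
            compiled_model.Dispose();
            model.Dispose();
            core.Dispose();
        }
    }
}
```

The explicit `Dispose()` calls follow the convention used in the unit tests above, since each wrapper class owns a native handle created through the `ov_*` P/Invoke layer.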