JeVois  1.21
JeVois Smart Embedded Machine Vision Toolkit
NetworkONNX.C
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// JeVois Smart Embedded Machine Vision Toolkit - Copyright (C) 2021 by Laurent Itti, the University of Southern
// California (USC), and iLab at USC. See http://iLab.usc.edu and http://jevois.org for information about this project.
//
// This file is part of the JeVois Smart Embedded Machine Vision Toolkit. This program is free software; you can
// redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software
// Foundation, version 2. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
// License for more details. You should have received a copy of the GNU General Public License along with this program;
// if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//
// Contact information: Laurent Itti - 3641 Watt Way, HNB-07A - Los Angeles, CA 90089-2520 - USA.
// Tel: +1 213 740 3527 - itti@pollux.usc.edu - http://iLab.usc.edu - http://jevois.org
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/*! \file */

#ifdef JEVOIS_PRO

#include <jevois/DNN/NetworkONNX.H>
#include <jevois/DNN/Utils.H>
#include <jevois/Util/Utils.H>

// ####################################################################################################
jevois::dnn::NetworkONNX::NetworkONNX(std::string const & instance) :
    jevois::dnn::Network(instance),
    itsEnv(ORT_LOGGING_LEVEL_WARNING, "NetworkONNX")
{
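  // Use up to 4 threads within each operator (e.g., one per big CPU core on JeVois-Pro), and let ONNX
  // Runtime apply extended graph optimizations (operator fusions, etc.) when the model is loaded: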
  itsSessionOptions.SetIntraOpNumThreads(4);
  itsSessionOptions.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);
}

// ####################################################################################################
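// Wait for any ongoing load or inference to finish before the ONNX Runtime session is destroyed: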
jevois::dnn::NetworkONNX::~NetworkONNX()
{ waitBeforeDestroy(); }

// ####################################################################################################
void jevois::dnn::NetworkONNX::freeze(bool doit)
{
  dataroot::freeze(doit);
  model::freeze(doit);
  jevois::dnn::Network::freeze(doit); // base class parameters
}

// ####################################################################################################
std::vector<vsi_nn_tensor_attr_t> jevois::dnn::NetworkONNX::inputShapes()
{
  if (ready() == false) LFATAL("Network is not ready");
  return itsInAttrs;
}

// ####################################################################################################
std::vector<vsi_nn_tensor_attr_t> jevois::dnn::NetworkONNX::outputShapes()
{
  if (ready() == false) LFATAL("Network is not ready");
  return itsOutAttrs;
}

// ####################################################################################################
void jevois::dnn::NetworkONNX::load()
{
  // Need to nuke the network first if it exists or we could run out of RAM:
  if (itsSession) itsSession.reset();

  std::string const m = jevois::absolutePath(dataroot::get(), model::get());
  LINFO("Loading " << m << " ...");

  // Create and load the network:
  itsSession.reset(new Ort::Session(itsEnv, m.c_str(), itsSessionOptions));
  itsInAttrs.clear();
  itsOutAttrs.clear();
  itsInNamePtrs.clear();
  itsInNames.clear();
  itsOutNamePtrs.clear();
  itsOutNames.clear();

  // Print information about inputs:
  size_t const num_input_nodes = itsSession->GetInputCount();
  Ort::AllocatorWithDefaultOptions allocator;
  LINFO("Network has " << num_input_nodes << " inputs:");
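  // Keep both the owning name pointers (its*NamePtrs) and the raw C strings (its*Names): Run() later
  // needs arrays of char const *, and the AllocatedStringPtr objects keep those strings alive: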
  for (size_t i = 0; i < num_input_nodes; ++i)
  {
    Ort::AllocatedStringPtr input_name = itsSession->GetInputNameAllocated(i, allocator);
    Ort::TypeInfo const type_info = itsSession->GetInputTypeInfo(i);
    Ort::ConstTensorTypeAndShapeInfo const tensor_info = type_info.GetTensorTypeAndShapeInfo();
    LINFO("- Input " << i << " [" << input_name.get() << "]: " << jevois::dnn::shapestr(tensor_info));
    itsInAttrs.emplace_back(jevois::dnn::tensorattr(tensor_info));
    itsInNames.emplace_back(input_name.get());
    itsInNamePtrs.emplace_back(std::move(input_name));
  }

  // Print information about outputs:
  size_t const num_output_nodes = itsSession->GetOutputCount();
  LINFO("Network has " << num_output_nodes << " outputs:");
  for (size_t i = 0; i < num_output_nodes; ++i)
  {
    Ort::AllocatedStringPtr output_name = itsSession->GetOutputNameAllocated(i, allocator);
    Ort::TypeInfo const type_info = itsSession->GetOutputTypeInfo(i);
    Ort::ConstTensorTypeAndShapeInfo const tensor_info = type_info.GetTensorTypeAndShapeInfo();
    LINFO("- Output " << i << " [" << output_name.get() << "]: " << jevois::dnn::shapestr(tensor_info));
    itsOutAttrs.emplace_back(jevois::dnn::tensorattr(tensor_info));
    itsOutNames.emplace_back(output_name.get());
    itsOutNamePtrs.emplace_back(std::move(output_name));
  }
  LINFO("Network " << m << " ready.");
}

// ####################################################################################################
std::vector<cv::Mat> jevois::dnn::NetworkONNX::doprocess(std::vector<cv::Mat> const & blobs,
                                                         std::vector<std::string> & info)
{
  if (! itsSession) LFATAL("Internal inconsistency");

  if (blobs.size() != itsInAttrs.size())
    LFATAL("Received " << blobs.size() << " inputs but network wants " << itsInAttrs.size());

  // Create input tensor objects from input blobs:
  std::vector<Ort::Value> inputs;
  std::vector<char const *> input_node_names;
  for (size_t i = 0; i < itsInAttrs.size(); ++i)
  {
    vsi_nn_tensor_attr_t const & attr = itsInAttrs[i];
    cv::Mat const & m = blobs[i];

    if (jevois::dnn::vsi2cv(attr.dtype.vx_type) != m.type())
      LFATAL("Input " << i << " has type " << jevois::cvtypestr(m.type()) <<
             " but network wants " << jevois::dnn::attrstr(attr));

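    // The NPU-style attr stores sizes with the fastest-varying dimension first, while ONNX Runtime
    // expects them slowest-first (e.g., NCHW), so reverse the order; also accumulate the expected size: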
    std::vector<int64_t> dims; size_t sz = jevois::cvBytesPerPix(m.type());
    for (size_t k = 0; k < attr.dim_num; ++k)
    {
      dims.emplace_back(attr.size[attr.dim_num - 1 - k]);
      sz *= attr.size[attr.dim_num - 1 - k];
    }

    if (sz != m.total() * m.elemSize())
      LFATAL("Input " << i << " size mismatch: got " << jevois::dnn::shapestr(m) <<
             " but network wants " << jevois::dnn::shapestr(attr));

    Ort::MemoryInfo meminfo = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);

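    // CreateTensor() below wraps the existing cv::Mat pixel data in an Ort::Value without copying, so
    // the caller's blobs must remain valid until Run() has completed: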
    switch (attr.dtype.vx_type)
    {
    case VSI_NN_TYPE_FLOAT32:
      inputs.emplace_back(Ort::Value::CreateTensor<float>(meminfo, reinterpret_cast<float *>(m.data),
                                                          sz, dims.data(), dims.size()));
      break;

    case VSI_NN_TYPE_UINT8:
      inputs.emplace_back(Ort::Value::CreateTensor<uint8_t>(meminfo, reinterpret_cast<uint8_t *>(m.data),
                                                            sz, dims.data(), dims.size()));
      break;

    case VSI_NN_TYPE_INT8:
      inputs.emplace_back(Ort::Value::CreateTensor<int8_t>(meminfo, reinterpret_cast<int8_t *>(m.data),
                                                           sz, dims.data(), dims.size()));
      break;

    case VSI_NN_TYPE_UINT32:
      inputs.emplace_back(Ort::Value::CreateTensor<uint32_t>(meminfo, reinterpret_cast<uint32_t *>(m.data),
                                                             sz, dims.data(), dims.size()));
      break;

    case VSI_NN_TYPE_INT32:
      inputs.emplace_back(Ort::Value::CreateTensor<int32_t>(meminfo, reinterpret_cast<int32_t *>(m.data),
                                                            sz, dims.data(), dims.size()));
      break;

    default:
      LFATAL("Sorry, input tensor type " << jevois::dnn::attrstr(attr) << " is not yet supported...");
    }
    if (inputs.back().IsTensor() == false) LFATAL("Failed to create tensor for input " << i);
  }

  // Run inference:
  itsOutputs = itsSession->Run(Ort::RunOptions{nullptr}, itsInNames.data(), inputs.data(), inputs.size(),
                               itsOutNames.data(), itsOutNames.size());
  if (itsOutputs.size() != itsOutNames.size())
    LFATAL("Received " << itsOutputs.size() << " outputs but network should produce " << itsOutNames.size());

  // Convert output tensors to cv::Mat with zero-copy:
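  // The cv::Mat headers created below share the memory owned by itsOutputs (hence it is a member
  // variable): the returned data remains valid until the next call to Run() overwrites itsOutputs.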
  std::vector<cv::Mat> outs;
  for (size_t i = 0; i < itsOutputs.size(); ++i)
  {
    Ort::Value & out = itsOutputs[i];
    vsi_nn_tensor_attr_t const & attr = itsOutAttrs[i];
    if (out.IsTensor() == false) LFATAL("Network produced a non-tensor output " << i);

    switch (attr.dtype.vx_type)
    {
    case VSI_NN_TYPE_FLOAT32:
      outs.emplace_back(cv::Mat(jevois::dnn::attrmat(itsOutAttrs[i], out.GetTensorMutableData<float>())));
      break;

    case VSI_NN_TYPE_UINT8:
      outs.emplace_back(cv::Mat(jevois::dnn::attrmat(itsOutAttrs[i], out.GetTensorMutableData<uint8_t>())));
      break;

    case VSI_NN_TYPE_INT8:
      outs.emplace_back(cv::Mat(jevois::dnn::attrmat(itsOutAttrs[i], out.GetTensorMutableData<int8_t>())));
      break;

    case VSI_NN_TYPE_UINT32:
      outs.emplace_back(cv::Mat(jevois::dnn::attrmat(itsOutAttrs[i], out.GetTensorMutableData<uint32_t>())));
      break;

    case VSI_NN_TYPE_INT32:
      outs.emplace_back(cv::Mat(jevois::dnn::attrmat(itsOutAttrs[i], out.GetTensorMutableData<int32_t>())));
      break;

    default:
      LFATAL("Sorry, output tensor type " << jevois::dnn::attrstr(attr) << " is not yet supported...");
    }
  }

  info.emplace_back("Forward Network OK");

  return outs;
}
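
// For reference, a minimal standalone sketch of the ONNX Runtime pattern used by doprocess() above.
// This is not part of JeVois: "session", the "input"/"output" tensor names, and the 1x3x224x224 float
// shape are placeholder assumptions, and onnxruntime_cxx_api.h, <array>, <vector> are assumed included.
//
//   std::vector<float> buf(1 * 3 * 224 * 224, 0.0F);                     // caller-owned input data
//   std::array<int64_t, 4> dims { 1, 3, 224, 224 };                      // slowest-varying first (NCHW)
//   Ort::MemoryInfo mem = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
//   Ort::Value in = Ort::Value::CreateTensor<float>(mem, buf.data(), buf.size(),
//                                                   dims.data(), dims.size()); // wraps buf, no copy
//   char const * innames[] = { "input" }; char const * outnames[] = { "output" };
//   std::vector<Ort::Value> outs = session.Run(Ort::RunOptions{nullptr}, innames, &in, 1, outnames, 1);
//   float const * result = outs[0].GetTensorData<float>();               // valid while outs is alive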

#endif // JEVOIS_PRO