26#include <tensorflow/lite/model.h>
27#include <tensorflow/lite/interpreter.h>
35 "directory within JEVOIS:/share/tensorflow/ which should contain two files: "
36 "model.tflite and labels.txt",
37 "mobilenet_v1_224_android_quant_2017_11_08", ParamCateg);
41 "If empty, use the module's path.",
42 JEVOIS_SHARE_PATH "/tensorflow", ParamCateg);
74 public jevois::Parameter<tflow::netdir, tflow::dataroot, tflow::top, tflow::thresh, tflow::threads,
96 float predict(cv::Mat const & cvimg, std::vector<jevois::ObjReco> & results);
105 std::unique_ptr<tflite::FlatBufferModel> model;
109 void onParamChange(tflow::netdir const & param, std::string const & newval) override;
110 void onParamChange(tflow::dataroot const & param, std::string const & newval) override;
115 void get_top_n(T * prediction, int prediction_size, std::vector<jevois::ObjReco> & top_results,
116 bool input_floating);
124 int Report(char const * format, va_list args) override;
Identify an object using TensorFlow deep neural network.
std::atomic< bool > itsReady
virtual ~TensorFlow()
Virtual destructor for safe inheritance.
JEVOIS_DECLARE_PARAMETER(top, unsigned int, "Max number of top-scoring predictions that score above thresh to return", 5, ParamCateg)
Parameter.
void readLabelsFile(std::string const &fname)
JeVoisReporter itsErrorReporter
float predict(cv::Mat const &cvimg, std::vector< jevois::ObjReco > &results)
Processing function, results are stored internally in the underlying TensorFlow network object.
void onParamChange(tflow::netdir const &param, std::string const &newval) override
JEVOIS_DECLARE_PARAMETER_WITH_CALLBACK(dataroot, std::string, "Root path for data, config, and weight files. " "If empty, use the module's path.", JEVOIS_SHARE_PATH "/tensorflow", ParamCateg)
Parameter.
JEVOIS_DECLARE_PARAMETER(threads, int, "Number of parallel computation threads, or 0 for auto", 4, jevois::Range< int >(0, 1024), ParamCateg)
Parameter.
std::unique_ptr< tflite::Interpreter > interpreter
JEVOIS_DECLARE_PARAMETER(scorescale, float, "Scaling factors applied to recognition scores, useful for InceptionV3", 1.0F, ParamCateg)
Parameter.
void postInit() override
Initialize, configure and load the network in a thread.
JEVOIS_DECLARE_PARAMETER_WITH_CALLBACK(netdir, std::string, "Network to load. This should be the name of a " "directory within JEVOIS:/share/tensorflow/ which should contain two files: " "model.tflite and labels.txt", "mobilenet_v1_224_android_quant_2017_11_08", ParamCateg)
Parameter.
std::future< void > itsReadyFut
std::atomic< bool > itsNeedReload
void get_top_n(T *prediction, int prediction_size, std::vector< jevois::ObjReco > &top_results, bool input_floating)
void postUninit() override
Un-initialize and free resources.
void getInDims(int &w, int &h, int &c)
Get input width, height, channels.
std::vector< std::string > labels
JEVOIS_DECLARE_PARAMETER(thresh, float, "Threshold (in percent confidence) above which predictions will be reported", 20.0F, jevois::Range< float >(0.0F, 100.0F), ParamCateg)
Parameter.
std::unique_ptr< tflite::FlatBufferModel > model
int Report(char const *format, va_list args) override