JeVoisBase  1.18
JeVois Smart Embedded Machine Vision Toolkit Base Modules
Share this page:
PyCoralClassify.py
Go to the documentation of this file.
1 import pyjevois
2 if pyjevois.pro: import libjevoispro as jevois
3 else: import libjevois as jevois
4 import cv2 as cv
5 import numpy as np
6 from PIL import Image
7 from pycoral.adapters import classify
8 from pycoral.adapters import common
9 from pycoral.utils.dataset import read_label_file
10 from pycoral.utils.edgetpu import make_interpreter
11 import time
12 
13 ## Object recognition using Coral Edge TPU
14 #
15 # This module runs an object classification deep neural network using the Coral TPU library. It only works on JeVois-Pro
16 # platform equipped with an Edge TPU add-on card. Classification (recognition) networks analyze a central portion of the
17 # whole scene and produce identity labels and confidence scores about what the object in the field of view might be.
18 #
19 # This module supports networks implemented in TensorFlow-Lite and ported to Edge TPU.
20 #
21 # Included with the standard JeVois distribution are:
22 #
23 # - MobileNetV3
24 # - more to come, please contribute!
25 #
26 # See the module's constructor (__init__) code and select a value for \b model to switch network.
27 #
28 # Object category names for models trained on ImageNet are at
29 # https://github.com/jevois/jevoisbase/blob/master/share/opencv-dnn/classification/synset_words.txt
30 #
31 # Sometimes it will make mistakes! The performance of SqueezeNet v1.1 is about 56.1% correct (mean average precision,
32 # top-1) on the ImageNet test set.
33 #
34 # This module is adapted from the sample code:
35 # https://github.com/google-coral/pycoral/blob/master/examples/classify_image.py
36 #
37 # More pre-trained models are available at https://coral.ai/models/
38 #
39 #
40 # @author Laurent Itti
41 #
42 # @videomapping YUYV 320 264 30.0 YUYV 320 240 30.0 JeVois PyCoralClassify
43 # @email itti@usc.edu
44 # @address 880 W 1st St Suite 807, Los Angeles CA 90012, USA
45 # @copyright Copyright (C) 2020 by Laurent Itti
46 # @mainurl http://jevois.org
47 # @supporturl http://jevois.org
48 # @otherurl http://jevois.org
49 # @license GPL v3
50 # @distribution Unrestricted
51 # @restrictions None
52 # @ingroup modules
54  # ####################################################################################################
55  ## Constructor
56  def __init__(self):
57  self.threshold = 0.2 # Confidence threshold (0..1), higher for stricter confidence.
58  self.rgb = True # True if model expects RGB inputs, otherwise it expects BGR
59 
60  # Select one of the models:
61  self.model = 'MobileNetV3'
62 
63  # You should not have to edit anything beyond this point.
64  if (self.model == 'MobileNetV3'):
65  classnames = 'imagenet_labels.txt'
66  modelname = 'tf2_mobilenet_v3_edgetpu_1.0_224_ptq_edgetpu.tflite'
67 
68  # Load names of classes:
69  sdir = pyjevois.share + '/coral/classification/'
70  self.labels = read_label_file(sdir + classnames)
71 
72  # Load network:
73  self.interpreter = make_interpreter(sdir + modelname)
74  #self.interpreter = make_interpreter(*modelname.split('@'))
75  self.interpreter.allocate_tensors()
76  self.timer = jevois.Timer('Coral classification', 10, jevois.LOG_DEBUG)
77 
78  # ####################################################################################################
79  ## JeVois main processing function
80  def process(self, inframe, outframe):
81  frame = inframe.getCvRGB() if self.rgb else inframe.getCvBGR()
82  self.timer.start()
83 
84  h = frame.shape[0]
85  w = frame.shape[1]
86 
87  # Set the input:
88  size = common.input_size(self.interpreter)
89  image = Image.fromarray(frame).resize(size, Image.ANTIALIAS)
90  common.set_input(self.interpreter, image)
91 
92  # Run the model
93  start = time.perf_counter()
94  self.interpreter.invoke()
95  inference_time = time.perf_counter() - start
96 
97  # Get classes with high enough scores:
98  classes = classify.get_classes(self.interpreter, 1, self.threshold)
99 
100  # Create dark-gray (value 80) image for the bottom panel, 24 pixels tall and show top-1 class:
101  msgbox = np.zeros((24, w, 3), dtype = np.uint8) + 80
102  for c in classes:
103  rlabel = '%s: %.2f' % (self.labels.get(c.id, c.id), c.score)
104  cv.putText(msgbox, rlabel, (3, 15), cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
105 
106  # Put efficiency information:
107  cv.putText(frame, 'JeVois Coral Classification - ' + self.model, (3, 15),
108  cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
109 
110  fps = self.timer.stop()
111  label = fps + ', %dms' % (inference_time * 1000.0)
112  cv.putText(frame, label, (3, h-5), cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
113 
114  # Stack bottom panel below main image:
115  frame = np.vstack((frame, msgbox))
116 
117  # Send output frame to host:
118  if self.rgb: outframe.sendCvRGB(frame)
119  else: outframe.sendCv(frame)
120 
121  # ###################################################################################################
122  ## Process function with GUI output
123  def processGUI(self, inframe, helper):
124  # Start a new display frame, gets its size and also whether mouse/keyboard are idle:
125  idle, winw, winh = helper.startFrame()
126 
127  # Draw full-resolution input frame from camera:
128  x, y, w, h = helper.drawInputFrame("c", inframe, False, False)
129 
130  # Get the next camera image at processing resolution (may block until it is captured):
131  frame = inframe.getCvRGBp() if self.rgb else inframe.getCvBGRp()
132 
133  # Start measuring image processing time:
134  self.timer.start()
135 
136  # Set the input:
137  size = common.input_size(self.interpreter)
138  image = Image.fromarray(frame).resize(size, Image.ANTIALIAS)
139  common.set_input(self.interpreter, image)
140 
141  # Run the model
142  start = time.perf_counter()
143  self.interpreter.invoke()
144  inference_time = time.perf_counter() - start
145 
146  # Get classes with high enough scores:
147  classes = classify.get_classes(self.interpreter, 1, self.threshold)
148 
149  # Put efficiency information:
150  helper.itext('JeVois-Pro Python Coral Classification - %s - %dms/inference' %
151  (self.model, inference_time * 1000.0), 0, -1)
152 
153  # Report top-scoring classes:
154  for c in classes:
155  rlabel = '%s: %.2f' % (self.labels.get(c.id, c.id), c.score)
156  helper.itext(rlabel, 0, -1)
157 
158  # Write frames/s info from our timer:
159  fps = self.timer.stop()
160  helper.iinfo(inframe, fps, winw, winh);
161 
162  # End of frame:
163  helper.endFrame()
PyCoralClassify.PyCoralClassify.rgb
rgb
Definition: PyCoralClassify.py:58
PyCoralClassify.PyCoralClassify.process
def process(self, inframe, outframe)
JeVois main processing function.
Definition: PyCoralClassify.py:80
PyCoralClassify.PyCoralClassify.threshold
threshold
Definition: PyCoralClassify.py:57
PyCoralClassify.PyCoralClassify.labels
labels
Definition: PyCoralClassify.py:70
PyCoralClassify.PyCoralClassify.__init__
def __init__(self)
Constructor.
Definition: PyCoralClassify.py:56
PyCoralClassify.PyCoralClassify.processGUI
def processGUI(self, inframe, helper)
Process function with GUI output.
Definition: PyCoralClassify.py:123
PyCoralClassify.PyCoralClassify.timer
timer
Definition: PyCoralClassify.py:76
PyCoralClassify.PyCoralClassify.interpreter
interpreter
Definition: PyCoralClassify.py:73
PyCoralClassify.PyCoralClassify
Object recognition using Coral Edge TPU.
Definition: PyCoralClassify.py:53
PyCoralClassify.PyCoralClassify.model
model
Definition: PyCoralClassify.py:61
jevois::Timer