JeVoisBase  1.20
JeVois Smart Embedded Machine Vision Toolkit Base Modules
PyCoralSegment.py
Go to the documentation of this file.
1 import pyjevois
2 if pyjevois.pro: import libjevoispro as jevois
3 else: import libjevois as jevois
4 import cv2 as cv
5 import numpy as np
6 from PIL import Image
7 from pycoral.utils import edgetpu
8 from pycoral.adapters import common
9 from pycoral.adapters import segment
10 import time
11 
12 ## Semantic segmentation using Coral Edge TPU
13 #
14 # More pre-trained models are available at https://coral.ai/models/
15 #
16 #
17 # @author Laurent Itti
18 #
19 # @videomapping YUYV 320 264 30.0 YUYV 320 240 30.0 JeVois PyCoralSegment
20 # @videomapping JVUI 0 0 30.0 CropScale=RGB24@512x288:YUYV 1920 1080 30.0 JeVois PyCoralSegment
21 # @email itti@usc.edu
22 # @address 880 W 1st St Suite 807, Los Angeles CA 90012, USA
23 # @copyright Copyright (C) 2020 by Laurent Itti
24 # @mainurl http://jevois.org
25 # @supporturl http://jevois.org
26 # @otherurl http://jevois.org
27 # @license GPL v3
28 # @distribution Unrestricted
29 # @restrictions None
30 # @ingroup modules
31 class PyCoralSegment:
32  # ####################################################################################################
33  ## Constructor
34  def __init__(self):
35  if jevois.getNumInstalledTPUs() == 0:
36  jevois.LFATAL("A Google Coral EdgeTPU is required for this module (PCIe M.2 2230 A+E or USB)")
37 
38  self.rgb = True # True if model expects RGB inputs, otherwise it expects BGR
39  self.keepaspect = True # Keep aspect ratio using zero padding
40  alpha = 128 # Transparency alpha values for processGUI, higher is less transparent
41  tidx = 0 # Class index of transparent background
42 
43  # Select one of the models:
44  self.model = 'UNet128' # expects 128x128
45  #self.model = 'MobileNetV2DeepLabV3' # expects 513x513
46 
47  # You should not have to edit anything beyond this point.
48  if (self.model == 'MobileNetV2DeepLabV3'):
49  modelname = 'deeplabv3_mnv2_dm05_pascal_quant_edgetpu.tflite'
50  elif (self.model == 'UNet128'):
51  modelname = 'keras_post_training_unet_mv2_128_quant_edgetpu.tflite'
52  tidx = 1
53 
54  # Load network:
55  sdir = pyjevois.share + '/coral/segmentation/'
56  self.interpreter = edgetpu.make_interpreter(sdir + modelname)
57  #self.interpreter = edgetpu.make_interpreter(*modelname.split('@'))
58  self.interpreter.allocate_tensors()
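# make_interpreter() loads the compiled .tflite file and attaches the Edge TPU delegate;
# allocate_tensors() must be called once before the interpreter can be invoked.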
59  self.timer = jevois.Timer('Coral segmentation', 10, jevois.LOG_DEBUG)
60  self.cmapRGB = self.create_pascal_label_colormap()
61  self.cmapRGBA = self.create_pascal_label_colormapRGBA(alpha, tidx)
62 
63  # ####################################################################################################
65  """Creates a label colormap used in PASCAL VOC segmentation benchmark.
66  Returns:
67  A Colormap for visualizing segmentation results.
68  """
69  colormap = np.zeros((256, 3), dtype=int)
70  indices = np.arange(256, dtype=int)
71 
72  for shift in reversed(range(8)):
73  for channel in range(3):
74  colormap[:, channel] |= ((indices >> channel) & 1) << shift
75  indices >>= 3
76 
77  return colormap.astype(np.uint8)
78 
79  # ####################################################################################################
80  def create_pascal_label_colormapRGBA(self, alpha, tidx):
81  """Creates a label colormap used in PASCAL VOC segmentation benchmark.
82  Returns:
83  A Colormap for visualizing segmentation results.
84  """
85  colormap = np.zeros((256, 4), dtype=int)
86  indices = np.arange(256, dtype=int)
87 
88  for shift in reversed(range(8)):
89  for channel in range(3):
90  colormap[:, channel] |= ((indices >> channel) & 1) << shift
91  indices >>= 3
92 
93  colormap[:, 3] = alpha
94  colormap[tidx, 3] = 0 # force fully transparent for entry tidx
95  return colormap.astype(np.uint8)
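# Both colormap helpers build the standard PASCAL VOC palette: bits 0, 1, 2 of each label index
# feed the most significant bit of R, G, B, bits 3, 4, 5 the next bit, and so on. For example,
# label 1 maps to (128,0,0), label 2 to (0,128,0), and label 4 to (0,0,128). The RGBA variant
# additionally stores a constant alpha for every class except tidx, which stays fully
# transparent so the background class does not hide the camera image when processGUI()
# alpha-blends the mask.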
96 
97  # ####################################################################################################
98  ## JeVois main processing function
99  def process(self, inframe, outframe):
100  frame = inframe.getCvRGB() if self.rgb else inframe.getCvBGR()
101  self.timer.start()
102 
103  h = frame.shape[0]
104  w = frame.shape[1]
105 
106  # Set the input:
107  width, height = common.input_size(self.interpreter)
108  img = Image.fromarray(frame);
109  if self.keepaspect:
110  resized_img, _ = common.set_resized_input(self.interpreter, img.size,
111  lambda size: img.resize(size, Image.ANTIALIAS))
112  else:
113  resized_img = img.resize((width, height), Image.ANTIALIAS)
114  common.set_input(self.interpreter, resized_img)
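# With keepaspect enabled, set_resized_input() scales the image into the model input while
# preserving its aspect ratio and zero-pads the remainder of the input tensor; the size of the
# returned resized_img is used further down to crop that padding out of the segmentation result.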
115 
116  # Run the model
117  start = time.perf_counter()
118  self.interpreter.invoke()
119  inference_time = time.perf_counter() - start
120 
121  # Draw segmentation results:
122  result = segment.get_output(self.interpreter)
123  if len(result.shape) == 3: result = np.argmax(result, axis=-1)
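# Depending on the model, get_output() returns either a 2D map of class indices or a 3D map of
# per-class scores; argmax over the last axis reduces the latter to class indices.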
124 
125  # If keep_aspect_ratio, we need to remove the padding area.
126  new_width, new_height = resized_img.size
127  result = result[:new_height, :new_width]
128  mask_img = Image.fromarray(self.cmapRGB[result])
129 
130  # Concat resized input image and processed segmentation results.
131  output_img = Image.new('RGB', (2 * img.width, img.height))
132  output_img.paste(img, (0, 0))
133  output_img.paste(mask_img.resize(img.size), (img.width, 0))
134 
135  # Back to opencv:
136  outcv = np.array(output_img)
137 
138  # Put efficiency information.
139  cv.putText(outcv, 'JeVois Coral Segmentation - ' + self.model, (3, 15),
140  cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
141 
142  fps = self.timer.stop()
143  label = fps + ', %dms' % (inference_time * 1000.0)
144  cv.putText(outcv, label, (3, h-5), cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
145 
146  # Send output frame to host:
147  if self.rgb: outframe.sendCvRGB(outcv)
148  else: outframe.sendCv(outcv)
149 
150  # ###################################################################################################
151  ## Process function with GUI output
152  def processGUI(self, inframe, helper):
153  # Start a new display frame, gets its size and also whether mouse/keyboard are idle:
154  idle, winw, winh = helper.startFrame()
155 
156  # Draw full-resolution input frame from camera:
157  x, y, w, h = helper.drawInputFrame("c", inframe, False, False)
158 
159  # Get the next camera image at processing resolution (may block until it is captured):
160  frame = inframe.getCvRGBp() if self.rgb else inframe.getCvBGRp()
161  iw, ih = frame.shape[1], frame.shape[0]
162 
163  # Start measuring image processing time:
164  self.timer.start()
165 
166  # Set the input:
167  width, height = common.input_size(self.interpreter)
168  img = Image.fromarray(frame);
169  if self.keepaspect:
170  resized_img, _ = common.set_resized_input(self.interpreter, img.size,
171  lambda size: img.resize(size, Image.ANTIALIAS))
172  else:
173  resized_img = img.resize((width, height), Image.ANTIALIAS)
174  common.set_input(self.interpreter, resized_img)
175 
176  # Run the model:
177  start = time.perf_counter()
178  self.interpreter.invoke()
179  inference_time = time.perf_counter() - start
180 
181  # Draw segmentation results:
182  result = segment.get_output(self.interpreter)
183  if len(result.shape) == 3: result = np.argmax(result, axis=-1)
184 
185  # If keep_aspect_ratio, we need to remove the padding area:
186  new_width, new_height = resized_img.size
187  result = result[:new_height, :new_width]
188  mask = self.cmapRGBA[result]
189 
190  # Draw the mask on top of our image, OpenGL will do the alpha blending:
191  helper.drawImage("m", mask, self.rgb, False, True)
192 
193  # Put efficiency information:
194  helper.itext('JeVois-Pro Python Coral Segmentation - %s - %dms/inference' %
195  (self.model, inference_time * 1000.0), 0, -1)
196 
197  # Write frames/s info from our timer:
198  fps = self.timer.stop()
199  helper.iinfo(inframe, fps, winw, winh);
200 
201  # End of frame:
202  helper.endFrame()
203 
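The same pycoral calls used by this module can also be exercised off-line on a saved image, which is handy when checking a model before deploying it to the camera. The sketch below is a minimal example under a few assumptions: pycoral and an Edge TPU are available on the host, and the model and image file names ('keras_post_training_unet_mv2_128_quant_edgetpu.tflite', 'test.jpg') are placeholders to adjust to your setup.

import numpy as np
from PIL import Image
from pycoral.utils import edgetpu
from pycoral.adapters import common, segment

# Load the Edge TPU model and prepare the interpreter (same calls as in __init__ above):
interpreter = edgetpu.make_interpreter('keras_post_training_unet_mv2_128_quant_edgetpu.tflite')
interpreter.allocate_tensors()

# Resize a test image to the model input size and run one inference:
width, height = common.input_size(interpreter)
img = Image.open('test.jpg').convert('RGB').resize((width, height))
common.set_input(interpreter, img)
interpreter.invoke()

# Reduce the output to a class-index map and list the classes that were found:
result = segment.get_output(interpreter)
if result.ndim == 3:
    result = np.argmax(result, axis=-1)
print('Classes present:', np.unique(result))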
PyCoralSegment.PyCoralSegment.process
def process(self, inframe, outframe)
JeVois main processing function.
Definition: PyCoralSegment.py:99
PyCoralSegment.PyCoralSegment.cmapRGB
cmapRGB
Definition: PyCoralSegment.py:60
PyCoralSegment.PyCoralSegment.create_pascal_label_colormap
def create_pascal_label_colormap(self)
Definition: PyCoralSegment.py:64
PyCoralSegment.PyCoralSegment.rgb
rgb
Definition: PyCoralSegment.py:38
PyCoralSegment.PyCoralSegment.model
model
Definition: PyCoralSegment.py:44
PyCoralSegment.PyCoralSegment.interpreter
interpreter
Definition: PyCoralSegment.py:56
jevois::getNumInstalledTPUs
size_t getNumInstalledTPUs()
PyCoralSegment.PyCoralSegment.cmapRGBA
cmapRGBA
Definition: PyCoralSegment.py:61
PyCoralSegment.PyCoralSegment.keepaspect
keepaspect
Definition: PyCoralSegment.py:39
PyCoralSegment.PyCoralSegment
Semantic segmentation using Coral Edge TPU.
Definition: PyCoralSegment.py:31
PyCoralSegment.PyCoralSegment.create_pascal_label_colormapRGBA
def create_pascal_label_colormapRGBA(self, alpha, tidx)
Definition: PyCoralSegment.py:80
PyCoralSegment.PyCoralSegment.__init__
def __init__(self)
Constructor.
Definition: PyCoralSegment.py:34
PyCoralSegment.PyCoralSegment.timer
timer
Definition: PyCoralSegment.py:59
PyCoralSegment.PyCoralSegment.processGUI
def processGUI(self, inframe, helper)
Process function with GUI output.
Definition: PyCoralSegment.py:152
jevois::Timer