JeVoisBase  1.20
JeVois Smart Embedded Machine Vision Toolkit Base Modules
Share this page:
PyCoralClassify.py
Go to the documentation of this file.
1 import pyjevois
2 if pyjevois.pro: import libjevoispro as jevois
3 else: import libjevois as jevois
4 import cv2 as cv
5 import numpy as np
6 from PIL import Image
7 from pycoral.utils import edgetpu
8 from pycoral.adapters import classify
9 from pycoral.adapters import common
10 from pycoral.utils.dataset import read_label_file
11 import time
12 
13 ## Object recognition using Coral Edge TPU
14 #
15 # This module runs an object classification deep neural network using the Coral TPU library. It only works on JeVois-Pro
16 # platform equipped with an Edge TPU add-on card. Classification (recognition) networks analyze a central portion of the
17 # whole scene and produce identity labels and confidence scores about what the object in the field of view might be.
18 #
19 # This module supports networks implemented in TensorFlow-Lite and ported to Edge TPU.
20 #
21 # Included with the standard JeVois distribution are:
22 #
23 # - MobileNetV3
24 # - more to come, please contribute!
25 #
26 # See the module's constructor (__init__) code and select a value for \b model to switch network.
27 #
28 # Object category names for models trained on ImageNet are at
29 # https://github.com/jevois/jevoisbase/blob/master/share/opencv-dnn/classification/synset_words.txt
30 #
31 # Sometimes it will make mistakes! As a point of reference, the performance of SqueezeNet v1.1 is about 56.1%
32 # correct (mean average precision, top-1) on the ImageNet test set.
33 #
34 # This module is adapted from the sample code:
35 # https://github.com/google-coral/pycoral/blob/master/examples/classify_image.py
36 #
37 # More pre-trained models are available at https://coral.ai/models/
38 #
39 #
40 # @author Laurent Itti
41 #
42 # @videomapping YUYV 320 264 30.0 YUYV 320 240 30.0 JeVois PyClassificationDNN
43 # @email itti@usc.edu
44 # @address 880 W 1st St Suite 807, Los Angeles CA 90012, USA
45 # @copyright Copyright (C) 2020 by Laurent Itti
46 # @mainurl http://jevois.org
47 # @supporturl http://jevois.org
48 # @otherurl http://jevois.org
49 # @license GPL v3
50 # @distribution Unrestricted
51 # @restrictions None
52 # @ingroup modules
54  # ####################################################################################################
55  ## Constructor
56  def __init__(self):
58  jevois.LFATAL("A Google Coral EdgeTPU is required for this module (PCIe M.2 2230 A+E or USB)")
59 
60  self.threshold = 0.2 # Confidence threshold (0..1), higher for stricter confidence.
61  self.rgb = True # True if model expects RGB inputs, otherwise it expects BGR
62 
63  # Select one of the models:
64  self.model = 'MobileNetV3'
65 
66  # You should not have to edit anything beyond this point.
67  if (self.model == 'MobileNetV3'):
68  classnames = 'imagenet_labels.txt'
69  modelname = 'tf2_mobilenet_v3_edgetpu_1.0_224_ptq_edgetpu.tflite'
70 
71  # Load names of classes:
72  sdir = pyjevois.share + '/coral/classification/'
73  self.labels = read_label_file(sdir + classnames)
74 
75  # Load network:
76  self.interpreter = edgetpu.make_interpreter(sdir + modelname)
77  #self.interpreter = edgetpu.make_interpreter(*modelname.split('@'))
78  self.interpreter.allocate_tensors()
79  self.timer = jevois.Timer('Coral classification', 10, jevois.LOG_DEBUG)
80 
81  # ####################################################################################################
82  ## JeVois main processing function
83  def process(self, inframe, outframe):
84  frame = inframe.getCvRGB() if self.rgb else inframe.getCvBGR()
85  self.timer.start()
86 
87  h = frame.shape[0]
88  w = frame.shape[1]
89 
90  # Set the input:
91  size = common.input_size(self.interpreter)
92  image = Image.fromarray(frame).resize(size, Image.ANTIALIAS)
93  common.set_input(self.interpreter, image)
94 
95  # Run the model
96  start = time.perf_counter()
97  self.interpreter.invoke()
98  inference_time = time.perf_counter() - start
99 
100  # Get classes with high enough scores:
101  classes = classify.get_classes(self.interpreter, 1, self.threshold)
102 
103  # Create dark-gray (value 80) image for the bottom panel, 24 pixels tall and show top-1 class:
104  msgbox = np.zeros((24, w, 3), dtype = np.uint8) + 80
105  for c in classes:
106  rlabel = '%s: %.2f' % (self.labels.get(c.id, c.id), c.score)
107  cv.putText(msgbox, rlabel, (3, 15), cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
108 
109  # Put efficiency information:
110  cv.putText(frame, 'JeVois Coral Classification - ' + self.model, (3, 15),
111  cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
112 
113  fps = self.timer.stop()
114  label = fps + ', %dms' % (inference_time * 1000.0)
115  cv.putText(frame, label, (3, h-5), cv.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv.LINE_AA)
116 
117  # Stack bottom panel below main image:
118  frame = np.vstack((frame, msgbox))
119 
120  # Send output frame to host:
121  if self.rgb: outframe.sendCvRGB(frame)
122  else: outframe.sendCv(frame)
123 
124  # ###################################################################################################
125  ## Process function with GUI output
126  def processGUI(self, inframe, helper):
127  # Start a new display frame, gets its size and also whether mouse/keyboard are idle:
128  idle, winw, winh = helper.startFrame()
129 
130  # Draw full-resolution input frame from camera:
131  x, y, w, h = helper.drawInputFrame("c", inframe, False, False)
132 
133  # Get the next camera image at processing resolution (may block until it is captured):
134  frame = inframe.getCvRGBp() if self.rgb else inframe.getCvBGRp()
135 
136  # Start measuring image processing time:
137  self.timer.start()
138 
139  # Set the input:
140  size = common.input_size(self.interpreter)
141  image = Image.fromarray(frame).resize(size, Image.ANTIALIAS)
142  common.set_input(self.interpreter, image)
143 
144  # Run the model
145  start = time.perf_counter()
146  self.interpreter.invoke()
147  inference_time = time.perf_counter() - start
148 
149  # Get classes with high enough scores:
150  classes = classify.get_classes(self.interpreter, 1, self.threshold)
151 
152  # Put efficiency information:
153  helper.itext('JeVois-Pro Python Coral Classification - %s - %dms/inference' %
154  (self.model, inference_time * 1000.0), 0, -1)
155 
156  # Report top-scoring classes:
157  for c in classes:
158  rlabel = '%s: %.2f' % (self.labels.get(c.id, c.id), c.score)
159  helper.itext(rlabel, 0, -1)
160 
161  # Write frames/s info from our timer:
162  fps = self.timer.stop()
163  helper.iinfo(inframe, fps, winw, winh);
164 
165  # End of frame:
166  helper.endFrame()
PyCoralClassify.PyCoralClassify.rgb
rgb
Definition: PyCoralClassify.py:61
PyCoralClassify.PyCoralClassify.process
def process(self, inframe, outframe)
JeVois main processing function.
Definition: PyCoralClassify.py:83
PyCoralClassify.PyCoralClassify.threshold
threshold
Definition: PyCoralClassify.py:60
PyCoralClassify.PyCoralClassify.labels
labels
Definition: PyCoralClassify.py:73
jevois::getNumInstalledTPUs
size_t getNumInstalledTPUs()
PyCoralClassify.PyCoralClassify.__init__
def __init__(self)
Constructor.
Definition: PyCoralClassify.py:56
PyCoralClassify.PyCoralClassify.processGUI
def processGUI(self, inframe, helper)
Process function with GUI output.
Definition: PyCoralClassify.py:126
PyCoralClassify.PyCoralClassify.timer
timer
Definition: PyCoralClassify.py:79
PyCoralClassify.PyCoralClassify.interpreter
interpreter
Definition: PyCoralClassify.py:76
PyCoralClassify.PyCoralClassify
Object recognition using Coral Edge TPU.
Definition: PyCoralClassify.py:53
PyCoralClassify.PyCoralClassify.model
model
Definition: PyCoralClassify.py:64
jevois::Timer