Python:插入 Arduino 串行通信时,YOLOv2 变慢
我从事目标检测和跟踪系统已有一段时间了。我尝试在检测到有人时,根据分辨率范围的宽度确定边界框的坐标并点亮 LED。到目前为止,在没有加入串行通信功能时,FPS 大约是 30;但加入串行通信后,FPS 下降到 7–10 左右,太低了。是什么导致了这里的问题?环境:OS=Windows,GPU=GTX 1070,CPU=i7,模型=darkflow、YOLOv2。标签:python, opencv, arduino, object-detection, darkflow。目标检测代码如下:
import cv2
from darkflow.net.build import TFNet
import numpy as np
import time
from collections import namedtuple
import luggage_arduino
"""
Main system for running the whole script for object detection and tracking
"""
class NeuralNetwork:
    """Object detection and tracking: runs YOLOv2 (darkflow) on webcam
    frames, draws the detections and forwards each box position to the
    Arduino over serial.

    NOTE(review): the serial transaction (with its time.sleep() calls)
    runs inside the detection loop, which is what drops the FPS from
    ~30 to ~7-10 — see the threaded version further below.
    """

    # Record type for a box center; created once, not on every call.
    _CenterCoord = namedtuple("center_coord", ['x', 'y'])

    def __init__(self):
        """Load the model configuration/weights and run the main loop."""
        options = {
            'model': 'cfg/yolov2.cfg',
            'load': 'bin/yolov2.weights',
            'threshold': 0.8,  # Confidence level of a detection box, range 0..1
            'gpu': 0.8  # GPU memory fraction; set to 0 to run on CPU
        }
        # OpenCV configuration
        tfnet = TFNet(options)
        colors = [tuple(255 * np.random.rand(3)) for _ in range(10)]  # Colors for different boxes
        capture = cv2.VideoCapture(0, cv2.CAP_DSHOW)
        capture.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        while True:  # Main loop for object detection and tracking
            stime = time.time()
            ret, frame = capture.read()
            # Draw the three vertical segments of the 1280x720 frame
            # (LEFT / CENTER / RIGHT); return values were unused locals.
            cv2.rectangle(frame, (0, 0), (426, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (426, 0), (852, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (852, 0), (1280, 720), (0, 0, 255), 2)
            if ret:
                results = tfnet.return_predict(frame)
                for color, result in zip(colors, results):
                    tl = (result['topleft']['x'], result['topleft']['y'])
                    br = (result['bottomright']['x'], result['bottomright']['y'])
                    label = result['label']
                    confidence = result['confidence']
                    text = '{}: {:.0f}%'.format(label, confidence * 100)
                    frame = cv2.rectangle(frame, tl, br, color, 5)
                    frame = cv2.putText(frame, text, tl, cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 0), 2)
                    self.center_of_box(tl, br)  # Coordinate calculation + serial comm
                cv2.imshow('frame', frame)
                print('FPS {:.1f}'.format(1 / (time.time() - stime)))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        capture.release()
        cv2.destroyAllWindows()

    def center_of_box(self, tl, br):
        """Compute the center of a detection box (tl/br are (x, y) pixel
        tuples) and hand the x coordinate to the tracking logic."""
        self.tl = tl
        self.br = br
        center_x = (tl[0] + br[0]) / 2
        center_y = (tl[1] + br[1]) / 2
        center_box = self._CenterCoord(center_x, center_y)  # Center coord of the detected box
        print(center_box)
        self.box_tracking(center_x)  # Track the box coordinate

    def box_tracking(self, center_x):
        """Map the center x coordinate to a screen segment code and send
        it to the Arduino (-1 LEFT, 0 CENTER, 1 RIGHT, 2 out of range).

        Fixed: the original wrapped this if/elif chain in a pointless
        `while True: ... break` that executed exactly once.
        """
        self.center_x = center_x
        if 0 <= center_x <= 426:
            center = -1
        elif 426 < center_x <= 852:
            center = 0
        elif 852 < center_x <= 1280:
            center = 1
        else:
            center = 2
        luggage_arduino.Arduino(center)  # Blocking serial comm
对于那些感兴趣的人,我会把我想出来的答案贴出来。解决方案是使用线程并发运行这些类。这是代码:
import cv2
from darkflow.net.build import TFNet
import numpy as np
import time
from collections import namedtuple
import luggage_arduino
import psutil
import threading
"""
Main system for running the whole script for object detection and tracking
"""
center_of_x = []
class NeuralNetwork:
    """Detection thread: runs YOLOv2 (darkflow) on webcam frames and
    publishes each detected box center x into the shared `center_of_x`
    list consumed by the `Ard` thread."""

    # Record type for a box center; created once, not on every call.
    _CenterCoord = namedtuple("center_coord", ['x', 'y'])

    def __init__(self):
        """Start the detection loop immediately on construction."""
        self.object_detect()

    def object_detect(self):
        """Load the model, open the camera and run the detection loop."""
        options = {
            'model': 'cfg/yolov2.cfg',
            'load': 'bin/yolov2.weights',
            'threshold': 0.8,  # Confidence level of a detection box, range 0..1
            'gpu': 0.8  # GPU memory fraction; set to 0 to run on CPU
        }
        # NOTE(review): sampled once before the loop, so every frame
        # prints this same stale value; sample inside the loop (with
        # interval=None) for a live reading.
        cpu_usage = psutil.cpu_percent(interval=1)
        tfnet = TFNet(options)
        capture = cv2.VideoCapture(0, cv2.CAP_DSHOW)
        capture.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        while True:  # Main loop for object detection and tracking
            stime = time.time()
            ret, frame = capture.read()
            # Draw the three vertical segments of the 1280x720 frame
            # (LEFT / CENTER / RIGHT); return values were unused locals.
            cv2.rectangle(frame, (0, 0), (426, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (426, 0), (852, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (852, 0), (1280, 720), (0, 0, 255), 2)
            if ret:
                results = tfnet.return_predict(frame)
                for result in results:
                    tl = (result['topleft']['x'], result['topleft']['y'])
                    br = (result['bottomright']['x'], result['bottomright']['y'])
                    label = result['label']
                    confidence = result['confidence']
                    text = '{}: {:.0f}%'.format(label, confidence * 100)
                    frame = cv2.rectangle(frame, tl, br, (255, 153, 255), 2)
                    frame = cv2.putText(frame, text, tl, cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 0), 2)
                    self.center_of_box(tl, br)
                cv2.imshow('frame', frame)
                print('FPS {:.1f}'.format(1 / (time.time() - stime)))
                print(cpu_usage)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        capture.release()
        cv2.destroyAllWindows()

    def center_of_box(self, tl, br):
        """Publish the box center into the shared list; returns center x.

        NOTE(review): clear() followed by append() on the shared list is
        not atomic — the Ard thread can observe an empty list in between.
        A queue.Queue would be the safe hand-off.
        """
        self.tl = tl
        self.br = br
        center_x = (tl[0] + br[0]) / 2
        center_y = (tl[1] + br[1]) / 2
        center_box = self._CenterCoord(center_x, center_y)
        center_of_x.clear()
        center_of_x.append(center_x)
        print(center_box)
        return center_x
class Ard:
    """Consumer thread: polls the shared `center_of_x` list and drives
    the blocking Arduino serial communication off the detection loop."""

    def __init__(self):
        # Give the detection thread time to load the model and publish
        # its first coordinate before we start polling.
        time.sleep(7)
        # NOTE(review): busy-polls the shared list forever; a
        # queue.Queue would avoid spinning and the clear/append race.
        while True:
            for center_x in center_of_x:
                self.box_tracking(center_x)

    def box_tracking(self, center_x):
        """Map an x coordinate to a screen segment code and send it
        (-1 LEFT, 0 CENTER, 1 RIGHT, 2 out of range).

        Fixed: the original wrapped this if/elif chain in a pointless
        `while True: ... break` that executed exactly once.
        """
        self.center_x = center_x
        if 0 <= center_x <= 426:
            center = -1
        elif 426 < center_x <= 852:
            center = 0
        elif 852 < center_x <= 1280:
            center = 1
        else:
            center = 2
        luggage_arduino.Arduino(center)  # Blocking serial comm
# Run detection and serial communication concurrently so the blocking
# serial writes no longer stall the detection loop. Guarded so that
# importing this module does not open the camera / serial port.
if __name__ == "__main__":
    t1 = threading.Thread(target=NeuralNetwork)
    t2 = threading.Thread(target=Ard)
    t1.start()
    t2.start()
这是pyserial代码
import serial
import time
# Shared serial connection to the Arduino (port COM3 @ 9600 baud),
# opened once at import time and reused by every Arduino() call.
arduino = serial.Serial("com3", 9600)
def serial_comm():
    """Placeholder hook called after each serial write; intentionally a no-op."""
    return None
class Arduino:
    """Main class for serial comm: sends one direction byte to the
    Arduino over the shared module-level `arduino` serial port."""

    # Segment code -> byte written over serial.
    _COMMANDS = {-1: b'L', 0: b'C', 1: b'R'}

    def __init__(self, center):
        """Send the command for `center` immediately on construction."""
        self.serial_comm(center)  # Calls function of serial comm

    def serial_comm(self, center):
        """Write the direction byte for `center`: -1 -> b'L', 0 -> b'C',
        1 -> b'R', anything else -> b'N' (none).

        NOTE(review): the time.sleep() calls block the calling thread;
        when invoked from the detection loop they are the main FPS drain.
        Fixed: the four duplicated branches are collapsed into a lookup.
        """
        self.center = center
        # b'X' can be replaced with str.encode("Your string here")
        command = self._COMMANDS.get(center)
        if command is not None:
            time.sleep(1)
            arduino.write(command)
            serial_comm()  # Module-level no-op hook
        else:
            time.sleep(1)
            arduino.write(b'N')
            serial_comm()
            # Presumably extra settle time for the 'none' case — TODO
            # confirm: indentation was lost in the paste, so this sleep
            # may originally have been module-level.
            time.sleep(2)
对于那些感兴趣的人,我会把我想出来的答案贴出来。解决方案是使用线程并发运行这些类。这是代码:
import cv2
from darkflow.net.build import TFNet
import numpy as np
import time
from collections import namedtuple
import luggage_arduino
import psutil
import threading
"""
Main system for running the whole script for object detection and tracking
"""
center_of_x = []
class NeuralNetwork:
    """Detection thread: runs YOLOv2 (darkflow) on webcam frames and
    publishes each detected box center x into the shared `center_of_x`
    list consumed by the `Ard` thread."""

    # Record type for a box center; created once, not on every call.
    _CenterCoord = namedtuple("center_coord", ['x', 'y'])

    def __init__(self):
        """Start the detection loop immediately on construction."""
        self.object_detect()

    def object_detect(self):
        """Load the model, open the camera and run the detection loop."""
        options = {
            'model': 'cfg/yolov2.cfg',
            'load': 'bin/yolov2.weights',
            'threshold': 0.8,  # Confidence level of a detection box, range 0..1
            'gpu': 0.8  # GPU memory fraction; set to 0 to run on CPU
        }
        # NOTE(review): sampled once before the loop, so every frame
        # prints this same stale value; sample inside the loop (with
        # interval=None) for a live reading.
        cpu_usage = psutil.cpu_percent(interval=1)
        tfnet = TFNet(options)
        capture = cv2.VideoCapture(0, cv2.CAP_DSHOW)
        capture.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        while True:  # Main loop for object detection and tracking
            stime = time.time()
            ret, frame = capture.read()
            # Draw the three vertical segments of the 1280x720 frame
            # (LEFT / CENTER / RIGHT); return values were unused locals.
            cv2.rectangle(frame, (0, 0), (426, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (426, 0), (852, 720), (0, 0, 255), 2)
            cv2.rectangle(frame, (852, 0), (1280, 720), (0, 0, 255), 2)
            if ret:
                results = tfnet.return_predict(frame)
                for result in results:
                    tl = (result['topleft']['x'], result['topleft']['y'])
                    br = (result['bottomright']['x'], result['bottomright']['y'])
                    label = result['label']
                    confidence = result['confidence']
                    text = '{}: {:.0f}%'.format(label, confidence * 100)
                    frame = cv2.rectangle(frame, tl, br, (255, 153, 255), 2)
                    frame = cv2.putText(frame, text, tl, cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 0), 2)
                    self.center_of_box(tl, br)
                cv2.imshow('frame', frame)
                print('FPS {:.1f}'.format(1 / (time.time() - stime)))
                print(cpu_usage)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        capture.release()
        cv2.destroyAllWindows()

    def center_of_box(self, tl, br):
        """Publish the box center into the shared list; returns center x.

        NOTE(review): clear() followed by append() on the shared list is
        not atomic — the Ard thread can observe an empty list in between.
        A queue.Queue would be the safe hand-off.
        """
        self.tl = tl
        self.br = br
        center_x = (tl[0] + br[0]) / 2
        center_y = (tl[1] + br[1]) / 2
        center_box = self._CenterCoord(center_x, center_y)
        center_of_x.clear()
        center_of_x.append(center_x)
        print(center_box)
        return center_x
class Ard:
    """Consumer thread: polls the shared `center_of_x` list and drives
    the blocking Arduino serial communication off the detection loop."""

    def __init__(self):
        # Give the detection thread time to load the model and publish
        # its first coordinate before we start polling.
        time.sleep(7)
        # NOTE(review): busy-polls the shared list forever; a
        # queue.Queue would avoid spinning and the clear/append race.
        while True:
            for center_x in center_of_x:
                self.box_tracking(center_x)

    def box_tracking(self, center_x):
        """Map an x coordinate to a screen segment code and send it
        (-1 LEFT, 0 CENTER, 1 RIGHT, 2 out of range).

        Fixed: the original wrapped this if/elif chain in a pointless
        `while True: ... break` that executed exactly once.
        """
        self.center_x = center_x
        if 0 <= center_x <= 426:
            center = -1
        elif 426 < center_x <= 852:
            center = 0
        elif 852 < center_x <= 1280:
            center = 1
        else:
            center = 2
        luggage_arduino.Arduino(center)  # Blocking serial comm
# Run detection and serial communication concurrently so the blocking
# serial writes no longer stall the detection loop. Guarded so that
# importing this module does not open the camera / serial port.
if __name__ == "__main__":
    t1 = threading.Thread(target=NeuralNetwork)
    t2 = threading.Thread(target=Ard)
    t1.start()
    t2.start()
这是pyserial代码
import serial
import time
# Shared serial connection to the Arduino (port COM3 @ 9600 baud),
# opened once at import time and reused by every Arduino() call.
arduino = serial.Serial("com3", 9600)
def serial_comm():
    """Placeholder hook called after each serial write; intentionally a no-op."""
    return None
class Arduino:
    """Main class for serial comm: sends one direction byte to the
    Arduino over the shared module-level `arduino` serial port."""

    # Segment code -> byte written over serial.
    _COMMANDS = {-1: b'L', 0: b'C', 1: b'R'}

    def __init__(self, center):
        """Send the command for `center` immediately on construction."""
        self.serial_comm(center)  # Calls function of serial comm

    def serial_comm(self, center):
        """Write the direction byte for `center`: -1 -> b'L', 0 -> b'C',
        1 -> b'R', anything else -> b'N' (none).

        NOTE(review): the time.sleep() calls block the calling thread;
        when invoked from the detection loop they are the main FPS drain.
        Fixed: the four duplicated branches are collapsed into a lookup.
        """
        self.center = center
        # b'X' can be replaced with str.encode("Your string here")
        command = self._COMMANDS.get(center)
        if command is not None:
            time.sleep(1)
            arduino.write(command)
            serial_comm()  # Module-level no-op hook
        else:
            time.sleep(1)
            arduino.write(b'N')
            serial_comm()
            # Presumably extra settle time for the 'none' case — TODO
            # confirm: indentation was lost in the paste, so this sleep
            # may originally have been module-level.
            time.sleep(2)
(评论)如果这些函数没有在线程中运行,那么对 time.sleep() 的调用可能是原因之一。Arduino 类是在线程中使用的吗?如果不是,它可能应该这样做。使用 Serial.readline() 时,这些调用会阻塞并等待数据。请参阅相关文章中"速度提高"一节及其中发布的代码。(此评论在原文中重复出现了两次。)
(评论)运行串行通信时,您的 CPU 利用率是多少?——@hcheung 我使用 psutil 检查该比率:当它检测到框并点亮 LED 时,psutil 显示 12%,任务管理器显示约 10%。
(评论)time.sleep() 的单位是秒还是毫秒?——@Joe 我还没有使用线程,但我会学习这一点并让这些类并发运行,非常感谢。(以上评论在原文中重复出现。)