I am trying to apply CSRT object tracking with two cameras using multithreading. The two cameras are connected directly to separate USB ports on my laptop, and both should track an object in real time.
The first function is "object_tracking_1" and the second is "object_tracking_2", and I run them in separate threads.
If I run any single camera on its own, it works very well. (Camera IDs: 0 = laptop webcam, 1 = camera 1, 2 = camera 2.)
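As a sanity check (not part of the tracking code below), each index can be probed separately like this:

```
import cv2

# Quick probe, separate from the tracking code: confirm which indices open
# and can deliver a frame. (0 = laptop webcam, 1/2 = USB cameras on my machine.)
for cam_id in (0, 1, 2):
    cap = cv2.VideoCapture(cam_id)
    ok, _ = cap.read()
    print("camera", cam_id, "opened:", cap.isOpened(), "frame read:", ok)
    cap.release()
```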
My problem:
- My laptop webcam + one of the connected cameras:
Both windows open, but only one of them actually tracks. On the other camera I still get the window to select the ROI box, but the tracking never starts.
In other words, both threads run, but only one does the object tracking.
- Two USB cameras (main goal):
When I run it, it sort of works, but the displayed window rapidly alternates between the two cameras' views.
And I get this error message:
```
Exception in thread Thread-8:
Traceback (most recent call last):
File "C:\ProgramData\Anaconda3\lib\threading.py", line 973, in _bootstrap_inner
self.run()
File "C:\ProgramData\Anaconda3\lib\threading.py", line 910, in run
self._target(*self._args, **self._kwargs)
File "C:\Users\user\AppData\Local\Temp/ipykernel_18232/2959108515.py", line 59, in object_tracking_1
cv2.error: OpenCV(3.4.11) C:\Users\appveyor\AppData\Local\Temp\1\pip-req-build-neg5amx3\opencv\modules\core\src\dxt.cpp:3335: error: (-215:Assertion failed) type == CV_32FC1 || type == CV_32FC2 || type == CV_64FC1 || type == CV_64FC2 in function 'cv::dft'
```
Posts I found online say this is a problem with the USB bus/address, but each camera works perfectly when I use them one at a time.
Code:
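If USB bandwidth really is the issue, one variant I could try is requesting smaller frames and a lower frame rate from each camera before the first read. The specific values below are only guesses on my part, not something I have verified:

```
# Untested idea: reduce per-camera USB bandwidth by asking for a smaller
# resolution and lower FPS before reading. The values are guesses.
video = cv2.VideoCapture(1)
video.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
video.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
video.set(cv2.CAP_PROP_FPS, 15)
```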
```
import cv2
import sys
import threading

(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')

def object_tracking_1():
    if __name__ == '__main__':
        # Set up tracker.
        # Instead of MIL, you can also use
        tracker_types = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
        tracker_type = tracker_types[7]

        if int(minor_ver) < 3:
            tracker = cv2.Tracker_create(tracker_type)
        else:
            if tracker_type == 'BOOSTING':
                tracker = cv2.TrackerBoosting_create()
            if tracker_type == 'MIL':
                tracker = cv2.TrackerMIL_create()
            if tracker_type == 'KCF':
                tracker = cv2.TrackerKCF_create()
            if tracker_type == 'TLD':
                tracker = cv2.TrackerTLD_create()
            if tracker_type == 'MEDIANFLOW':
                tracker = cv2.TrackerMedianFlow_create()
            if tracker_type == 'GOTURN':
                tracker = cv2.TrackerGOTURN_create()
            if tracker_type == 'MOSSE':
                tracker = cv2.TrackerMOSSE_create()
            if tracker_type == "CSRT":
                tracker = cv2.TrackerCSRT_create()

        # Read video
        video = cv2.VideoCapture(1)  ######################################################
        #video = cv2.VideoCapture("video15.mp4")

        # Exit if video not opened.
        if not video.isOpened():
            print("Could not open video")
            sys.exit()

        # Read first frame.
        ok, frame = video.read()
        if not ok:
            print('Cannot read video file')
            sys.exit()

        # Define an initial bounding box
        bbox = (287, 23, 86, 320)

        # Uncomment the line below to select a different bounding box
        bbox = cv2.selectROI(frame, False)

        # Initialize tracker with first frame and bounding box
        ok = tracker.init(frame, bbox)

        while True:
            # Read a new frame
            ok, frame = video.read()
            if not ok:
                break

            # Start timer
            timer = cv2.getTickCount()

            # Update tracker
            ok, bbox = tracker.update(frame)

            # Calculate Frames per second (FPS)
            fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)

            # Draw bounding box
            if ok:
                # Tracking success
                p1 = (int(bbox[0]), int(bbox[1]))  # bbox[0]=x value, bbox[1]=y value
                p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))  # x2, y2
                cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
                #print(bbox)
                List_x1 = (bbox[0] + bbox[2]) / 2
                List_y1 = (bbox[1] + bbox[3]) / 2
                RList_x1 = round(List_x1, 2)
                RList_y1 = round(List_y1, 2)
                Coordinate_x1 = str(RList_x1)
                Coordinate_y1 = str(RList_y1)
                cv2.putText(frame, "x Coordinate : " + Coordinate_x1, (100, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
                cv2.putText(frame, "y Coordinate : " + Coordinate_y1, (100, 105), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
                # print('x1=' , RList_x1 , 'y2=' , RList_y1 )
                # X = list(map(str,bbox))
                # print(X)
                # cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
            else:
                # Tracking failure
                cv2.putText(frame, "Tracking failure detected", (100, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)

            # Display tracker type on frame
            cv2.putText(frame, tracker_type + " Tracker", (100, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

            # Display FPS on frame
            cv2.putText(frame, "FPS : " + str(int(fps)), (100, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

            # Display result
            cv2.imshow("Tracking", frame)

            # Exit if ESC pressed
            k = cv2.waitKey(1) & 0xff
            if k == 27:
                break

def object_tracking_2():
    if __name__ == '__main__':
        # Set up tracker.
        # Instead of MIL, you can also use
        tracker_types = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
        tracker_type = tracker_types[7]

        if int(minor_ver) < 3:
            tracker = cv2.Tracker_create(tracker_type)
        else:
            if tracker_type == 'BOOSTING':
                tracker = cv2.TrackerBoosting_create()
            if tracker_type == 'MIL':
                tracker = cv2.TrackerMIL_create()
            if tracker_type == 'KCF':
                tracker = cv2.TrackerKCF_create()
            if tracker_type == 'TLD':
                tracker = cv2.TrackerTLD_create()
            if tracker_type == 'MEDIANFLOW':
                tracker = cv2.TrackerMedianFlow_create()
            if tracker_type == 'GOTURN':
                tracker = cv2.TrackerGOTURN_create()
            if tracker_type == 'MOSSE':
                tracker = cv2.TrackerMOSSE_create()
            if tracker_type == "CSRT":
                tracker = cv2.TrackerCSRT_create()

        # Read video
        video = cv2.VideoCapture(2)  ##################################################
        #video = cv2.VideoCapture("video15.mp4")

        # Exit if video not opened.
        if not video.isOpened():
            print("Could not open video")
            sys.exit()

        # Read first frame.
        ok, frame = video.read()
        if not ok:
            print('Cannot read video file')
            sys.exit()

        # Define an initial bounding box
        bbox = (287, 23, 86, 320)

        # Uncomment the line below to select a different bounding box
        bbox = cv2.selectROI(frame, False)

        # Initialize tracker with first frame and bounding box
        ok = tracker.init(frame, bbox)

        while True:
            # Read a new frame
            ok, frame = video.read()
            if not ok:
                break

            # Start timer
            timer = cv2.getTickCount()

            # Update tracker
            ok, bbox = tracker.update(frame)

            # Calculate Frames per second (FPS)
            fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)

            # Draw bounding box
            if ok:
                # Tracking success
                p1 = (int(bbox[0]), int(bbox[1]))  # bbox[0]=x value, bbox[1]=y value
                p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))  # x2, y2
                cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
                #print(bbox)
                List_x2 = (bbox[0] + bbox[2]) / 2
                List_y2 = (bbox[1] + bbox[3]) / 2
                RList_x2 = round(List_x2, 2)
                RList_y2 = round(List_y2, 2)
                Coordinate_x2 = str(RList_x2)
                Coordinate_y2 = str(RList_y2)
                cv2.putText(frame, "x Coordinate : " + Coordinate_x2, (100, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
                cv2.putText(frame, "y Coordinate : " + Coordinate_y2, (100, 105), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
                # print('x1=' , RList_x1 , 'y2=' , RList_y1 )
                # X = list(map(str,bbox))
                # print(X)
                # cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
            else:
                # Tracking failure
                cv2.putText(frame, "Tracking failure detected", (100, 80), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)

            # Display tracker type on frame
            cv2.putText(frame, tracker_type + " Tracker", (100, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

            # Display FPS on frame
            cv2.putText(frame, "FPS : " + str(int(fps)), (100, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)

            # Display result
            cv2.imshow("Tracking", frame)

            # Exit if ESC pressed
            k = cv2.waitKey(1) & 0xff
            if k == 27:
                break

t1 = threading.Thread(target=object_tracking_1)
t2 = threading.Thread(target=object_tracking_2)
t1.start()
t2.start()
# t1.join()
# t2.join()
```
My main goal is to apply the CSRT algorithm to each connected camera.
Thanks a lot.
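For reference, here is a condensed sketch of the structure I am aiming for: one function that takes the camera index and its own window name, started once per camera. The separate window names, the function arguments, and the video.release() call are my own additions (they are not in the code above), and I am not sure whether calling selectROI/imshow from two threads is even allowed, which is part of what I am asking.

```
import threading
import cv2

def track_camera(cam_id, window_name):
    # One CSRT tracker per camera, each drawing into its own named window
    # so the two threads do not share the single "Tracking" window.
    tracker = cv2.TrackerCSRT_create()
    video = cv2.VideoCapture(cam_id)
    ok, frame = video.read()
    if not ok:
        return
    bbox = cv2.selectROI(window_name, frame, False)
    tracker.init(frame, bbox)
    while True:
        ok, frame = video.read()
        if not ok:
            break
        ok, bbox = tracker.update(frame)
        if ok:
            p1 = (int(bbox[0]), int(bbox[1]))
            p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
            cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
        cv2.imshow(window_name, frame)
        if cv2.waitKey(1) & 0xFF == 27:
            break
    video.release()

t1 = threading.Thread(target=track_camera, args=(1, "Tracking 1"))
t2 = threading.Thread(target=track_camera, args=(2, "Tracking 2"))
t1.start()
t2.start()
```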