I was streaming video using UDP steam in python using the socket module, it somehow crashes after some time and sometime randomly works perfectly as well, sometimes changing the network somehow stops it from crashing, I have two sockets server running one is used for streaming and other one gives the command to my drone( i take footage from drone using rpi and proces it on my laptop and forward a direction command based on it)
I tried running the code but it crashes randomly, the tkinter window stops responding and the client program window also stops responding i expected it to work without crashing (that happens sometimes randomly)
Tkinter Window Crashing: Server(Laptop) Window turns white
Server Code (Running on my laptop):
import pickle
from tkinter import *
import cv2
import socket
from PIL import Image, ImageTk
import PIL
import time
from gestures.gesture_recognition import GestureRecognition
cmd = "S"
track = True
label = "STATIONARY"
def keyUp(e):
global cmd, track, label
cmd = "S"
track = True
label = "STATIONARY"
def keyDown(e):
global cmd, track, label
track = False
if e.char == "W" or e.char == "w":
cmd = "B"
label = "FORWARD"
elif e.char == "A" or e.char == "a":
cmd = "R"
label = "LEFT"
elif e.char == "S" or e.char == "s":
cmd = "F"
label = "BACKWARD"
elif e.char == "D" or e.char == "d":
cmd = "L"
label = "RIGHT"
elif e.char == "":
cmd = "X"
label = "LANDING"
else:
cmd = "S"
label = "STATIONARY"
recog = GestureRecognition()
window = Tk()
f1 = LabelFrame(window)
f1.pack()
l1 = Label(f1)
l1.pack()
statlabel = Label(window, font=("Helvetica", 100))
statlabel.pack()
window.bind("<KeyPress>", keyDown)
window.bind("<KeyRelease>", keyUp)
pTime = 0
cTime = 0
capCX = 320
capCY = 240
ip = "192.168.1.101"
port = 5556
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((ip, port))
s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s2.bind((ip, port))
s2.listen(5)
clientSoc, addr = s2.accept()
size = (640, 480)
vidCap = cv2.VideoWriter('flightFootage.avi',
cv2.VideoWriter_fourcc(*'MJPG'),
10, size)
while True:
x = s.recvfrom(307300)
clientIP = x[1][0]
img = x[0]
img = pickle.loads(img)
if img is not None:
img = cv2.imdecode(img, cv2.IMREAD_COLOR)
img, gesture, brect = recog.recognize(img)
cTime = time.time()
fps = 1 / (cTime - pTime)
pTime = cTime
cv2.rectangle(img, (capCX - 50, capCY - 50), (capCX + 50, capCY + 50), (0, 255, 0), 1)
if brect and track:
if brect[0] > (capCX - 50) and brect[1] > (capCY - 50) and brect[2] < (capCX + 50) and brect[3] < (
capCY + 50):
if gesture == 1:
cmd = "S"
label = "STATIONARY"
elif gesture == 0:
#cmd = "F"
cmd = "S"
label = "FORWARD"
elif gesture == 5:
cmd = "B"
label = "BACKWARD"
elif gesture == 6:
cmd = "R"
label = "RIGHT"
elif gesture == 7:
cmd = "L"
label = "LEFT"
elif gesture == 3:
cmd = "X"
label = "LANDING"
else:
cmd = "S"
label = "STATIONARY"
else:
cmd = "S"
label = "STATIONARY"
clientSoc.send(bytes(cmd, "utf-8"))
statlabel.config(text=label)
vidCap.write(img)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = ImageTk.PhotoImage(PIL.Image.fromarray(img))
l1['image'] = img
window.update()
if cv2.waitKey(1) == 27 or cmd == "X":
cmd = "X"
clientSoc.send(bytes(cmd, "utf-8"))
statlabel.config(text=label)
break
window.destroy()
vidCap.release()
cv2.destroyAllWindows()
s.close()
s2.close()
Client Code (Running on drone):
import cv2, socket, pickle
from multiprocessing import Process, Queue
from FlightControllerLink import FC
def movementThread(q):
apm = FC(connectionport="/dev/ttyACM0")
h = -0.34
x = apm.arm_and_takeoff(height=h)
while True:
cmd = q.get()
if cmd == "S":
apm.stop()
elif cmd == "F":
apm.forward()
elif cmd == "B":
apm.backward()
elif cmd == "L":
apm.left()
elif cmd == "R":
apm.right()
elif cmd == "X":
apm.setModeLand()
break
else:
print("STATIONARY")
apm.close()
print("landed")
def vidCmd(q):
serverip="192.168.1.101"
serverport=5556
cap = cv2.VideoCapture(0)
s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s2.connect((serverip, serverport))
s=socket.socket(socket.AF_INET , socket.SOCK_DGRAM)
while True:
ret,photo = cap.read()
cv2.imshow('my pic', photo)
ret, buffer = cv2.imencode(".jpg", photo, [int(cv2.IMWRITE_JPEG_QUALITY),30])
x_as_bytes = pickle.dumps(buffer)
s.sendto(x_as_bytes,(serverip , serverport))
cmd = s2.recv(1)
cmd = cmd.decode("utf-8")
q.put(cmd)
if cv2.waitKey(1) == 27 or cmd == "X":
q.put("X")
break
s.close()
s2.close()
cv2.destroyAllWindows()
cap.release()
queue = Queue()
p1 = Process(target=movementThread, args=(queue, ))
p2 = Process(target=vidCmd, args=(queue, ))
p1.start()
p2.start()
p1.join()
p2.join()
Custom Flight controller link module:
from pymavlink import mavutil
from time import sleep
class FC:
the_connection = None
height = 0
def __init__(self, connectionport='com4', dialect='ardupilotmega'):
mavutil.set_dialect(dialect)
self.the_connection = mavutil.mavlink_connection(connectionport)
self.the_connection.wait_heartbeat()
print("Heartbeat from system (system %u component %u)" %
(self.the_connection.target_system, self.the_connection.target_component))
def arm(self):
x = 1
while x:
self.the_connection.mav.command_long_send(self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_CMD_COMPONENT_ARM_DISARM, 0, 1, 0, 0, 0, 0, 0,
0)
msg = self.the_connection.recv_match(type='COMMAND_ACK', blocking=True).to_dict()
x = msg['result']
print("ARMED")
def disarm(self):
x = 1
while x:
self.the_connection.mav.command_long_send(self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_CMD_COMPONENT_ARM_DISARM, 0, 0, 0, 0, 0, 0, 0,
0)
msg = self.the_connection.recv_match(type='COMMAND_ACK', blocking=True).to_dict()
x = msg['result']
print("DISARMED")
def getAlt(self):
self.the_connection.mav.request_data_stream_send(self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_DATA_STREAM_EXTRA2, 1, 1)
msg = self.the_connection.recv_match(type='VFR_HUD', blocking=True).to_dict()
self.the_connection.mav.request_data_stream_send(self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_DATA_STREAM_EXTRA2, 1, 0)
return msg['alt']
def setModeGuided(self):
x = 1
while x:
self.the_connection.mav.set_mode_send(self.the_connection.target_system, 89, 4)
x = self.the_connection.recv_match(type='COMMAND_ACK', blocking=True).to_dict()['result']
print("MODE GUIDED")
def setModeLand(self):
x = 1
print("LANDING")
while x:
self.the_connection.mav.set_mode_send(self.the_connection.target_system, 81, 9)
x = self.the_connection.recv_match(type='COMMAND_ACK', blocking=True).to_dict()['result']
sleep(1)
while self.getAlt() >= 0:
print("LANDING CURRENT HEIGHT ", self.getAlt())
sleep(1)
print("LANDED ", self.getAlt())
def arm_and_takeoff(self, height=1):
self.setModeGuided()
self.arm()
self.height = height
x = 1
while x:
self.the_connection.mav.command_long_send(self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_CMD_NAV_TAKEOFF, 0, 0, 0, 0, 0, 0, 0,
self.height)
x = self.the_connection.recv_match(type='COMMAND_ACK', blocking=True).to_dict()['result']
if x == 4 or x == 3:
print("FAILED!!")
return 0
break
else:
print("TAKEOFF ACCEPTED")
while self.getAlt() < self.height*0.95:
print("CURRENT HEIGHT ", self.getAlt())
print("REACHED TARGET ALTITUDE")
return 1
def forward(self, speed=0.1):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b110111000111),
0, 0, -1*self.height,
speed, 0, 0, 0, 0, 0, 0,
0))
print("GOING FORWARD")
def backward(self, speed=0.1):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b110111000111),
0, 0, -1*self.height,
-1*speed, 0, 0, 0, 0, 0, 0,
0))
print("GOING BACKWARD")
def left(self, speed=0.1):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b110111000111),
0, 0, -1*self.height,
0, -1*speed, 0, 0, 0, 0, 0,
0))
print("GOING LEFT")
def right(self, speed=0.1):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b110111000111),
0, 0, -1*self.height,
0, speed, 0, 0, 0, 0, 0,
0))
print("GOING RIGHT")
def CW(self, speed=0.5):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b100111111111),
0, 0, -1*self.height,
0, 0, 0, 0, 0, 0, 0,
speed))
print("ROTATING CW")
def CCW(self, speed=0.5):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b100111111111),
0, 0, -1*self.height,
0, 0, 0, 0, 0, 0, 0,
-1*speed))
print("ROTATING CW")
def stop(self):
self.the_connection.mav.send(mavutil.mavlink.MAVLink_set_position_target_local_ned_message(10, self.the_connection.target_system,
self.the_connection.target_component,
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
int(0b100111000111),
0, 0, -1*self.height,
0, 0, 0, 0, 0, 0, 0,
0))
print("STATIONARY")
def close(self):
self.the_connection.close()
if __name__ == "__main__":
apm = FC()
apm.takeOff(height=-0.34)
sleep(5)
apm.setModeLand()
The gesture module was taken from the github repository: https://github.com/kinivi/tello-gesture-control/tree/main/gestures the gesture_recognition.py module was used, i didnt use the tello code (just the gesture recognition module) as I am using custom made drone (APM 2.8) and communicating with pymavlink