Streaming video from server - python

I have a server which loads the video and a client which receives and displays the video. I'm new to python. the server code to load the video and send it to client/receiver.
server.py
import cv2
import socket

UDP_IP = "localhost"
UDP_PORT = 5005
CHUNK = 46080            # bytes per UDP datagram
CHUNKS_PER_FRAME = 20    # 480*640*3 = 46080*20 bytes per BGR frame

cap = cv2.VideoCapture('D:\\testVideo.mp4')
# Create the socket ONCE -- the original opened a new socket on every
# frame and never closed it (file-descriptor leak).
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
    ret, frame = cap.read()
    if not ret:
        # End of file or read failure: frame is None and imshow/flatten
        # would raise. The original crashed here at end of video.
        break
    cv2.imshow('frame', frame)
    # tobytes() replaces the deprecated tostring(); identical byte content
    s = frame.flatten().tobytes()
    for i in range(CHUNKS_PER_FRAME):
        sock.sendto(s[i * CHUNK:(i + 1) * CHUNK], (UDP_IP, UDP_PORT))
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
The receiver code
import socket
import numpy
import cv2

UDP_IP = "localhost"
UDP_PORT = 5005
CHUNK = 46080              # bytes per datagram sent by the server
FRAME_BYTES = CHUNK * 20   # one complete 480x640 BGR frame

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))

# Accumulate *bytes*, not str: recvfrom() returns bytes, and "" + bytes
# raises "TypeError: must be str, not bytes" -- the asker's exact error.
s = b""
while True:
    data, addr = sock.recvfrom(CHUNK)
    s += data
    if len(s) == FRAME_BYTES:
        # frombuffer() replaces the deprecated numpy.fromstring()
        frame = numpy.frombuffer(s, dtype=numpy.uint8).reshape(480, 640, 3)
        cv2.imshow('frame', frame)
        s = b""
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
I'm getting an error in "s += data"-TypeError: must be str, not bytes. Is there a problem with my append? Is my approach correct?

Your problem is that s is a string and + concatenates strings, but data is a bytes object — socket.recvfrom() returns its payload as a byte string such as b'\xff\xff\xff', and Python 3 refuses to mix str and bytes.
What you can do is reserve a bytearray-buffer, and then receive into that, since you seem to restrict filesize anyway (you check each iteration if your local (string)buffer is a certain length). With a maximum size, you just allocate that and read however much data comes through. If there's more, it's just cut off, and if it's less, the rest will be NULL-padded. Kind of like this:
# allocate 32 bytes as buffer
# NOTE(review): `sock` must already be a bound SOCK_DGRAM socket (see the
# runnable demo below). recvmsg_into() fills the pre-allocated bytearray
# in place; bytes beyond the received datagram's length stay NUL.
s = bytearray(32)
sock.recvmsg_into([s])
Test this by running the following Python code:
import socket
# UDP socket bound locally; acts as the receiver in this demo.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("localhost", 8080))
# reserve a buffer of 4 bytes
s = bytearray(4)
# Blocks until a datagram arrives, then copies at most 4 bytes into `s`;
# any excess in the datagram is discarded -- see the sample output below.
sock.recvmsg_into([s])
At that point, the socket will block and wait for input. From a second terminal run this:
echo "foobar" | nc -u 127.0.0.1 8080
Your Python-script will continue execution and print what it received:
(4, [], 32, ('127.0.0.1', 56464))
>>> s
bytearray(b'foob')
If you don't want to go down that road, I have a solution to your original approach of just appending in an infinite loop. I previously posted a solution with appending to a bytearray, but that didn't work due to type mismatch. It turns out you don't even need bytearray, as this works as expected:
# Demonstrates that bytes objects can be concatenated with += just like
# strings -- no bytearray needed.
s = b""
while s != b"foobarfoobar":
    s += b"foobar"
print(s)

Related

Send img from pi using python sockets to display in real time using cv2 on windows

I'm trying to send images from my pi to windows so I can use yolo on windows then send code back to my pi to control my robot.
I can't display the pictures sent by the pi on my pc in real time because the bytes are different every time and it messes up opencv. (opens half photo)
How would my code on windows know how many bytes are going to be in the photo being sent from the pi? Or is there another way to go about this?
Pi code:
from picamera.array import PiRGBArray
from picamera import PiCamera
import socket
import time
import cv2

listensocket = socket.socket()  # TCP server socket
Port = 8000  # Port to host server on
maxConnections = 999
IP = socket.gethostname()  # hostname of local machine (display only)
listensocket.bind(('', Port))

# Start the server and wait for the single viewer to connect
listensocket.listen(maxConnections)
print("Server started at " + IP + " on port " + str(Port))
(clientsocket, address) = listensocket.accept()
print("New connection made!")

# Initialize the camera
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 20
raw_capture = PiRGBArray(camera, size=(640, 480))
time.sleep(0.1)  # camera warm-up

# Capture frames continuously from the camera
for frame in camera.capture_continuous(raw_capture, format="bgr", use_video_port=True):
    image = frame.array
    # Wait for keyPress for 1 millisecond
    key = cv2.waitKey(1) & 0xFF
    cv2.imwrite("image.jpg", image)
    # `with` closes the file every iteration -- the original leaked one
    # open handle per frame. read() with no size returns the WHOLE file,
    # so frames larger than 56000 bytes are no longer silently truncated.
    with open('image.jpg', 'rb') as file:
        file_data = file.read()
    # sendall() retries partial writes; plain send() may transmit only
    # part of the buffer. NOTE(review): the receiver still has no way to
    # know where one JPEG ends and the next begins -- see the
    # length-header answer below for that fix.
    clientsocket.sendall(file_data)
    print("Data has been transmitted successfully")
    raw_capture.truncate(0)
    time.sleep(0.050)
    if key == ord("q"):
        break

camera.close()
cv2.destroyAllWindows()
Windows code:
import socket
#import time
import cv2
#import sys
# NOTE(review): question code, kept as-is -- the flagged issues are what
# the answer below addresses with a length header.
s = socket.socket()
hostname = 'raspi' #Server IP/Hostname
port = 8000 #Server Port
s.connect((hostname, port)) #Connects to server
# A single file object is reused forever, so every received chunk is
# appended to image.jpg and nothing is flushed before cv2 reads the file.
file = open('image.jpg', 'wb')
file_data = s.recv(2048)
# VideoCapture is opened once, before any complete image exists on disk,
# so cap.read() below keeps re-reading the same (possibly partial) file.
cap = cv2.VideoCapture('image.jpg')
while file_data:
    # Save recent image from server
    file.write(file_data)
    # recv(56000) returns *up to* 56000 bytes -- TCP has no message
    # boundaries, which is why half-written images get displayed.
    file_data = s.recv(56000)
    print("File has been received successfully.")
    # Display image as video
    ret, img = cap.read()
    # Show image
    cv2.imshow("Image", img)
    key = cv2.waitKey(50)
    if key == 27:
        break
file.close()
cv2.destroyAllWindows()
I don't have a Pi to test with your exact setup, but I did a similar project a while ago. My approach to this problem is to use either a null-terminated header that tells the client how many bytes the image will be, or a constant-size header agreed on beforehand — for example, the first 4 bytes tell the client how many bytes to receive. Here is a sample code snippet for the first solution I described:
sender:
import socket
import cv2 as cv

# read a test image
img = cv.imread('panda.jpg')
# encode it to jpg format in memory -- no redundant temp-file round trip
retval, buf = cv.imencode(".JPEG", img)
# get number of bytes in the encoded image
number_of_bytes = len(buf)
# create a null-terminated ASCII header carrying the payload size
header = str(number_of_bytes) + "\0"
# encode it to utf-8 byte format
raw_header = bytes(header, "utf-8")

# create server socket
sock = socket.socket()
sock.bind(('localhost', 8000))
sock.listen()
conn, addr = sock.accept()
# send header first; the receiver uses it to know how many bytes follow.
# sendall() loops until everything is written -- plain send() may do a
# partial write and silently drop the tail of the image.
conn.sendall(raw_header)
# send the rest of the image, then release both sockets
conn.sendall(buf)
conn.close()
sock.close()
receiver:
import socket
# the original snippet used np/cv below but never imported them (NameError)
import numpy as np
import cv2 as cv

# create client socket
sock = socket.socket()
sock.connect(('localhost', 8000))

# receive single bytes until the null terminator to recover the header
raw_header = []
recv_byte = sock.recv(1)
while recv_byte != b"\0":
    raw_header.append(recv_byte)
    recv_byte = sock.recv(1)
# decode header into the announced image size
img_len = int(str(b''.join(raw_header), "utf-8"))

# recv(n) may return FEWER than n bytes, so loop until the whole image
# announced by the header has arrived (the original did one recv() only).
chunks = []
remaining = img_len
while remaining > 0:
    chunk = sock.recv(remaining)
    if not chunk:
        raise ConnectionError("socket closed before full image arrived")
    chunks.append(chunk)
    remaining -= len(chunk)
recv_img = b''.join(chunks)

# save image to file (or decode it straight back to a cv2 image)
with open("traveller_panda.jpg", 'wb+') as im_file:
    im_file.write(recv_img)

# transform back from jpg bytes to numpy array (cv2 image format)
image_decoded = np.frombuffer(recv_img, dtype=np.uint8)
image_decoded = cv.imdecode(image_decoded, cv.IMREAD_COLOR)
# display image
cv.imshow("recieved", image_decoded)
cv.waitKey()

Code for streaming video over tcp socket in python; need help understanding parts of it

I got this code for streaming a video from a client to a server:
Client:
import cv2, imutils
import mss
import numpy
from win32api import GetSystemMetrics
import pickle
import socket, struct

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_ip = "IPADRESS"
port = 9999
client_socket.connect((host_ip, port))

with mss.mss() as sct:
    # full primary-screen capture region
    monitor = {"top": 0, "left": 0, "width": GetSystemMetrics(0), "height": GetSystemMetrics(1)}
    while True:
        img = numpy.array(sct.grab(monitor))
        frame = imutils.resize(img, width=1400)
        a = pickle.dumps(frame)
        # 8-byte native-order unsigned long long ("Q") length prefix,
        # followed by the payload, so the receiver can find message
        # boundaries in the TCP byte stream.
        message = struct.pack("Q", len(a)) + a
        # sendall() retries until every byte is written; send() may do a
        # partial write and desynchronise the length-prefixed protocol.
        client_socket.sendall(message)
Server:
import cv2, imutils
import numpy as np
import pickle, struct
import socket
import threading
# Listening TCP socket; one display thread per client is spawned below.
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
host_ip = "IP_ADRESS"  # NOTE(review): placeholder -- replace with a real bind address
port = 9999
socket_address = (host_ip,port)
server_socket.bind(socket_address)
server_socket.listen()
print("Listening at",socket_address)
def show_client(addr, client_socket):
    """Receive length-prefixed pickled frames from one client and display them.

    Runs in its own thread; returns when the client disconnects or the
    viewer presses 'q'.
    """
    try:
        print('CLIENT {} CONNECTED!'.format(addr))
        if client_socket:  # if a client socket exists
            data = b""
            payload_size = struct.calcsize("Q")  # 8-byte length prefix
            while True:
                # read until the 8-byte size header is complete
                while len(data) < payload_size:
                    packet = client_socket.recv(4 * 1024)
                    if not packet:
                        # Peer closed the connection. The original only
                        # broke out of this inner loop and then tried to
                        # struct.unpack an incomplete header.
                        client_socket.close()
                        return
                    data += packet
                packed_msg_size = data[:payload_size]
                data = data[payload_size:]
                msg_size = struct.unpack("Q", packed_msg_size)[0]
                # read until the full pickled frame is buffered; surplus
                # bytes already received belong to the next frame
                while len(data) < msg_size:
                    data += client_socket.recv(4 * 1024)
                frame_data = data[:msg_size]
                data = data[msg_size:]
                frame = pickle.loads(frame_data)
                cv2.imshow("Screen", frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord('q'):
                    break
            client_socket.close()
    except Exception as e:
        print(e)
        # original log line misspelled "CLINET"
        print(f"CLIENT {addr} DISCONNECTED")
# Main accept loop: one display thread per client connection.
while True:
    client_socket, addr = server_socket.accept()
    thread = threading.Thread(target=show_client, args=(addr, client_socket))
    thread.start()
    # active_count() replaces activeCount(), deprecated since Python 3.10;
    # minus one excludes the main thread from the client count.
    print("TOTAL CLIENTS ", threading.active_count() - 1)
A lot of this code is from a youtuber called "pyshine", and everything is working just fine, but I don't understand, what a specific part of this code is really doing.
These are the parts:
First of all in the client-code:
message = struct.pack("Q",len(a))+a
I know that it does something with the length of the pickle and, that it appends the pickle to it, but not more.
Second of all in the server-code:
data = b""                           # rolling receive buffer (bytes)
payload_size = struct.calcsize("Q")  # 8: size of the length header
while True:
    # Step 1: keep reading until at least the 8-byte header is buffered
    # (one recv may return any number of bytes, even mid-header).
    while len(data) < payload_size:
        packet = client_socket.recv(4*1024)
        if not packet:
            break
        data+=packet
    # Step 2: split the header off the front of the buffer ...
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    # ... and decode it into the frame's byte length.
    msg_size = struct.unpack("Q",packed_msg_size)[0]
    # Step 3: keep reading until the whole frame is buffered. Any bytes
    # beyond msg_size belong to the NEXT frame and stay in `data`.
    while len(data) < msg_size:
        data += client_socket.recv(4*1024)
    # Step 4: exactly one serialized frame, ready for pickle.loads().
    frame_data = data[:msg_size]
A socket is a primitive transport and doesn't care what data you send. You can send two frames and the client may receive them as a single package, or as many small packages — the socket does not mark where the first frame ends. To resolve this problem, the code first sends len(data) and then the data itself. It uses struct with the "Q" format, so the encoded len(data) always occupies exactly 8 bytes. This way the receiver knows how much data it must read to have a complete frame: first it reads 8 bytes to get len(data), and then it uses that value to read the full payload. That is what the second piece of code does — it repeats recv() until it has all the data. It also checks whether it already received bytes belonging to the next frame, and keeps that surplus (data[payload_size:]) to use with the next frame.
If you use the same rules on both sides — the sender first sends 8 bytes with the size and then the data; the receiver first reads 8 bytes with the size and then reads that much data — then you have defined a protocol (similar to other protocols: HTTP (HyperText Transfer Protocol), FTP (File Transfer Protocol), SMTP (Simple Mail Transfer Protocol), etc.).

i am running a program to send a picture over two sockets and i have got no errors but the program gets stuck

I created a server and a client in python so I could send an image across two sockets but when I run the client it receives the data but once it reaches the end of the file it gets stuck. It raises no error and doesn't crash the terminal is just there stuck.
I have tried changing the code a bit to no avail. I am still a beginner.
client.py
import socket
import cv2
import numpy as np
import pickle

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.bind(('127.0.0.1', 5000))
host_ip = ('127.0.0.1', 400)
client_socket.connect(host_ip)

# Read until the server closes the connection -- an empty recv() is EOF.
serialized_img = b""
while True:
    packet = client_socket.recv(1024)
    if not packet:
        break
    serialized_img += packet
image = pickle.loads(serialized_img)
cv2.imshow("a", image)
# Without waitKey() the HighGUI window never pumps its event loop, so no
# image appears and the script looks "stuck" -- the asker's symptom.
cv2.waitKey(0)
cv2.destroyAllWindows()
server.py
import socket
import cv2
import numpy as np
import pickle

server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(('127.0.0.1', 400))

# Grab one webcam frame and preview it locally before serving it.
cap = cv2.VideoCapture(0)
ret, img = cap.read()
cv2.imshow('image', img)
cv2.waitKey(0)
cap.release()
cv2.destroyAllWindows()

serialized_img = pickle.dumps(img)
print(serialized_img)
while ret:
    try:
        server_socket.listen()
        client_socket, client_address = server_socket.accept()
        print(client_address)
        client_socket.sendall(serialized_img)
        # Closing is what tells the client "end of data": its recv()
        # returns b"" and its loop ends. Without this close the client
        # blocks forever -- the bug this question is about.
        client_socket.close()
    except socket.timeout:
        print("time out")
server_socket.close()
I want the client side to be able to show the image.
Close client_socket in server to inform client that it end of data.
client_socket.sendall(serialized_img)
client_socket.close()
In client you have to wait for key to keep window opened.
cv2.imshow("a", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
Server:
import socket
import cv2
import pickle
# TCP server that sends one pickled webcam frame per connection.
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
# SO_REUSEADDR lets the script rebind the port right after a restart.
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('127.0.0.1', 4000))
# Capture a single frame, preview it locally, then release the camera.
cap = cv2.VideoCapture(0)
ret, img = cap.read()
cap.release()
cv2.imshow("server", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
serialized_img = pickle.dumps(img)
while ret:
    try:
        server_socket.listen()
        client_socket,client_address = server_socket.accept()
        print(client_address)
        # Send the whole frame, then close so the client sees EOF --
        # that is how it knows the image is complete.
        client_socket.sendall(serialized_img)
        client_socket.close()
        print('closed')
    except socket.timeout :
        print("time out")
server_socket.close()
Client:
import socket
import cv2
import pickle

# Connect, read the pickled frame until EOF, then display it.
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 4000))

# recv() returns b"" once the server closes the connection; the walrus
# form loops exactly as long as data keeps arriving.
chunks = []
while chunk := client_socket.recv(1024):
    chunks.append(chunk)
serialized_img = b"".join(chunks)

image = pickle.loads(serialized_img)
cv2.imshow("client", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
If you want to send live video then server would have to run separate thread with camera or with while ret. And every client_socket run in separate thread in while True loop. Problem is how to inform client where is end of one frame and beginning of next frame. You couldn't use close() for this.
EDIT: this code streams images from camera so client can see on live - with small delay.
It sends image's size before image so client know how many bytes to receive to get full image. Serialized integer has always 8 bytes so I always receive 8 bytes before image.
I use cv2.waitKey(10) in client to check button not only to close window but it didn't display image without this. Maybe window has to receive events from system to work correctly (and refresh window) like in others modules - ie. PyGame - and waitKey() is checking events.
Server:
import socket
import cv2
import pickle
# Live-streaming server: for every frame it sends a pickled length
# (always 8 bytes) followed by the pickled frame, so the client knows
# exactly how much to read.
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('127.0.0.1', 4000))
cap = cv2.VideoCapture(0)
while True:
    try:
        server_socket.listen()
        print('waiting ...')
        client_socket,client_address = server_socket.accept()
        print(client_address)
        while True:
            try:
                ret, img = cap.read()
                serialized_img = pickle.dumps(img)
                print('serialized_len:', len(serialized_img))
                # pickling this int always yields 8 bytes, giving the
                # client a fixed-size prefix to read first
                serialized_len = pickle.dumps(len(serialized_img))
                #print('len(serialized_len):', len(serialized_len)) # always length 8
                client_socket.sendall(serialized_len) # always length 8
                client_socket.sendall(serialized_img)
            except Exception as ex:
                print(ex)
                # exit loop on error, i.e. when the client closes the connection
                break
        client_socket.close()
        print('closed')
    except socket.timeout:
        print('time out')
cap.release()
server_socket.close()
Client:
import socket
import cv2
import pickle

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 4000))
cv2.namedWindow('client')

def _recv_exact(sock, count):
    # recv(n) may deliver FEWER than n bytes; loop until `count` bytes
    # arrive, or return b"" if the peer closes the connection first.
    buf = b""
    while len(buf) < count:
        chunk = sock.recv(min(count - len(buf), 1024))
        if not chunk:
            return b""
        buf += chunk
    return buf

while True:
    # the pickled length sent by the server is always exactly 8 bytes,
    # but a single recv(8) was not guaranteed to return all of them
    serialized_len = _recv_exact(client_socket, 8)
    if not serialized_len:
        print('error: no data')
        break
    length = pickle.loads(serialized_len)
    serialized_image = _recv_exact(client_socket, length)
    if not serialized_image:
        print('error: no data')
        break
    image = pickle.loads(serialized_image)
    cv2.imshow('client', image)
    # waitKey also pumps the GUI event loop -- without it the window never
    # refreshes; 10 ms keeps the receive loop responsive
    key = cv2.waitKey(10) & 0xFF
    if key == 27:  # Esc closes the viewer
        break
cv2.destroyAllWindows()

UDP video streaming latency - OPENCV python

I have streaming video UDP, but the packets are delayed and disordered.
Here is my code for reference which I have taken from opencv website.
But when I give Socket.Stream to use TCP connection it streams fine and the frames are continuous.
Server.py
import socket
import numpy
import time
import cv2

UDP_IP = "127.0.0.1"
UDP_PORT = 999
CHUNK = 46080             # bytes per datagram
FRAME_BYTES = CHUNK * 20  # one 480x640 BGR frame

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))

s = b''
while True:
    data, addr = sock.recvfrom(CHUNK)
    s += data
    # NOTE(review): one dropped or reordered datagram means this exact-
    # length check never fires again -- the root of the latency/disorder
    # problem this question describes.
    if len(s) == FRAME_BYTES:
        # frombuffer() replaces the deprecated numpy.fromstring()
        frame = numpy.frombuffer(s, dtype=numpy.uint8).reshape(480, 640, 3)
        cv2.imshow('frame', frame)
        s = b''
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
Client.py
import socket
import numpy as np
import cv2

UDP_IP = '127.0.0.1'
UDP_PORT = 999
CHUNK = 46080  # 480*640*3 bytes split into 20 datagrams per frame

cap = cv2.VideoCapture(0)
#cap.set(cv2.CAP_PROP_FRAME_WIDTH,320)
#cap.set(cv2.CAP_PROP_FRAME_HEIGHT,240)

# One socket for the whole session -- the original created (and leaked) a
# new socket on every frame. The Python-2 style xrange() shim is dropped:
# range() is already lazy in Python 3.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
    ret, frame = cap.read()
    if not ret:
        # camera read failed; imshow(None)/flatten would raise
        break
    cv2.imshow('frame', frame)
    # tobytes() replaces the deprecated tostring()
    s = frame.flatten().tobytes()
    for i in range(20):
        sock.sendto(s[i * CHUNK:(i + 1) * CHUNK], (UDP_IP, UDP_PORT))
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
It looks like the client.py script is sending out UDP frames (much) faster than the server.py is piecing them together. I am not sure why this is the case. Perhaps after increasing the size of byte s after every iteration, Python needs to find a new place to store s, which causes it to miss frames sent from client.py. However, one way to sync them is to tell the client.py to send after server.py acknowledges receipt of the frame.
In the below code, server.py will listen for a start-of-image transmission via b'eframe'. server.py would the consecutively reply client.py with b'n' to cue sending of the next frame. client.py would notify the end-of-image by sending b'eframe'
server.py
import socket
import numpy
import time
import cv2

UDP_IP = "127.0.0.1"
UDP_PORT = 999
CHUNK = 46080  # bytes per image datagram

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))

s = b''
while True:
    # wait for a start-of-frame marker (b'sframe'/b'eframe' are 6 bytes)
    data, addr = sock.recvfrom(6)
    if data == b'sframe':
        while True:
            # acknowledge: cue the client to send the next chunk
            sock.sendto(b'n', addr)
            data, addr = sock.recvfrom(CHUNK)
            if data == b'eframe':  # end-of-frame marker
                break
            s += data
            if len(s) == (CHUNK * 20):
                # frombuffer() replaces the deprecated numpy.fromstring()
                frame = numpy.frombuffer(s, dtype=numpy.uint8)
                frame = frame.reshape(480, 640, 3)
                cv2.imshow('frame', frame)
                s = b''
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
client.py
import socket
import numpy as np
import cv2

UDP_IP = '127.0.0.1'
UDP_PORT = 999
CHUNK = 46080  # bytes per datagram; 20 datagrams per frame

cap = cv2.VideoCapture(0)
#cap.set(cv2.CAP_PROP_FRAME_WIDTH,320)
#cap.set(cv2.CAP_PROP_FRAME_HEIGHT,240)

# Single socket for the whole stream -- the original opened a fresh one
# per frame and never closed it (file-descriptor leak). The xrange()
# shim is unnecessary: range() is already lazy in Python 3.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
    ret, frame = cap.read()
    if not ret:  # camera read failed
        break
    cv2.imshow('frame', frame)
    # tobytes() replaces the deprecated tostring()
    s = frame.flatten().tobytes()
    # announce a new frame, then send each chunk only after the server
    # acknowledges with b'n' -- the flow control that keeps the sender
    # from outrunning the receiver
    sock.sendto(b'sframe', (UDP_IP, UDP_PORT))
    for i in range(20):
        data, addr = sock.recvfrom(1)  # b'n' is 1 byte long
        if data == b'n':
            sock.sendto(s[i * CHUNK:(i + 1) * CHUNK], (UDP_IP, UDP_PORT))
    data, addr = sock.recvfrom(1)
    sock.sendto(b'eframe', (UDP_IP, UDP_PORT))
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()

OpenCV live stream video over socket in Python 3

I am trying to create a simple application to send live stream video over the socket in Python 3 with OpenCV. I am new to OpenCV and socket programming so if you can provide answer in detail I will be very grateful. Thank you.
Here is sender.py
import socket
import time
import cv2

capture = cv2.VideoCapture(0)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('192.168.1.10', 50505))
while True:
    ret, frame = capture.read()
    if not ret:
        # camera failure: imencode(None) would raise
        break
    # tobytes() replaces the deprecated tostring(). NOTE(review): the
    # JPEG blob is sent with no framing, so the receiver cannot tell
    # where one image ends -- see the answers below for fixes.
    data = cv2.imencode('.jpg', frame)[1].tobytes()
    sock.sendall(data)
    time.sleep(2)
Here is receiver.py
import socket
import cv2
import numpy as np
import time
HOST = '192.168.1.10'
PORT = 50505
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')
conn, addr = s.accept()
while True:
    # NOTE(review): a single recv(8192) almost never holds a whole JPEG,
    # so imdecode() gets a truncated buffer and returns None -- which is
    # exactly what makes imshow() raise the (-215) size.width>0 error
    # quoted below.
    data = conn.recv(8192)
    nparr = np.fromstring(data, np.uint8)
    frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    cv2.imshow('frame', frame)
    time.sleep(2)
and this is the error
receiver.py", line 29, in <module>
cv2.imshow('frame', frame)
cv2.error: D:\Build\OpenCV\opencv-3.4.0\modules\highgui\src\window.cpp:339:
error: (-215) size.width>0 && size.height>0 in function cv::imshow
I'm the author of VidGear Video Processing python library that now also provides NetGear API, which is exclusively designed to transfer video frames synchronously between interconnecting systems over the network in real-time. You try it as follows:
A. Server End:(Bare-Minimum example)
Open your favorite terminal and execute the following python code:
Note: You can end streaming anytime on both server and client side by pressing [Ctrl+c] on your keyboard on server end!
# import libraries
from vidgear.gears import VideoGear
from vidgear.gears import NetGear

stream = VideoGear(source='test.mp4').start()  # Open any video stream
server = NetGear()  # Define netgear server with default settings

# infinite loop until [Ctrl+C] is pressed
while True:
    try:
        # read frames
        frame = stream.read()
        # a None frame means the stream is exhausted
        if frame is None:
            # if True break the infinite loop
            break
        # do something with frame here
        # send frame to client
        server.send(frame)
    except KeyboardInterrupt:
        # break the infinite loop
        break

# safely close video stream
stream.stop()
# safely close server -- the original called writer.close(), but no
# `writer` exists in this script (NameError); the NetGear object is `server`
server.close()
B. Client End:(Bare-Minimum example)
Then open another terminal on the same system and execute the following python code and see the output:
# import libraries
from vidgear.gears import NetGear
import cv2

# define netgear client with `receive_mode = True` and default settings
client = NetGear(receive_mode = True)
# infinite loop
while True:
    # receive frames from network; None signals the server stopped sending
    frame = client.recv()
    # check if frame is None
    if frame is None:
        # if True break the infinite loop
        break
    # do something with frame here
    # Show output window
    cv2.imshow("Output Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    # check for 'q' key-press
    if key == ord("q"):
        # if 'q' key-pressed break out
        break
# close output window
cv2.destroyAllWindows()
# safely close client
client.close()
NetGear as of now supports two ZeroMQ messaging patterns: i.e zmq.PAIR and zmq.REQ and zmq.REP and the supported protocol are: 'tcp' and 'ipc'
More advanced usage can be found here: https://abhitronix.github.io/vidgear/latest/gears/netgear/overview/
It is because you are receiving only a small amount of data, so the image is not complete. A single 8192-byte read is insufficient in 99.99% of cases, because nearly every image is larger than 8 KB. You'll need to grab ALL the data sent by the sender in order to convert it to an image.
You can take a look at my code on github and change it acording to your need.
Long story short, easy option is to first send number of bytes to the client, and then send an image itself. In client code, after receiving length of image, loop until all bytes are received.
for example:
...
img_len = 175428  # byte length announced by sender.py
received = 0
# accumulate *bytes*: recv() returns bytes, and '' + bytes raises
# TypeError -- the original initialised data as a str
data = b''
while received < img_len:
    d = sock.recv(1024)
    received += len(d)
    data += d
# frombuffer() replaces the deprecated np.fromstring()
nparr = np.frombuffer(data, np.uint8)
frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
cv2.imshow('frame', frame)
Late answer, but for those looking for live video transmission and reception over socket:
Here is the snapshot of results:
server.py
import socket, cv2, pickle, struct

# Socket Create
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:', host_ip)
port = 9999
socket_address = (host_ip, port)

# Socket Bind
server_socket.bind(socket_address)

# Socket Listen
server_socket.listen(5)
print("LISTENING AT:", socket_address)

# Socket Accept
while True:
    client_socket, addr = server_socket.accept()
    print('GOT CONNECTION FROM:', addr)
    if client_socket:
        vid = cv2.VideoCapture(0)
        while vid.isOpened():
            ret, frame = vid.read()
            a = pickle.dumps(frame)
            # 8-byte length prefix + pickled frame = one message
            message = struct.pack("Q", len(a)) + a
            client_socket.sendall(message)
            cv2.imshow('TRANSMITTING VIDEO', frame)
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q'):
                client_socket.close()
                # The original kept looping after closing the socket, so
                # the next sendall() raised on a closed socket.
                break
        vid.release()
client.py
import socket,cv2, pickle,struct

# create socket
client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
host_ip = '192.168.1.20' # paste your server ip address here
port = 9999
client_socket.connect((host_ip,port)) # a tuple
data = b""                           # rolling receive buffer (bytes)
payload_size = struct.calcsize("Q")  # 8-byte length prefix
while True:
    # read until the fixed-size length header is complete
    while len(data) < payload_size:
        packet = client_socket.recv(4*1024) # 4K
        if not packet: break
        data+=packet
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("Q",packed_msg_size)[0]
    # read until the full pickled frame has arrived; surplus bytes stay
    # in `data` and belong to the next message
    while len(data) < msg_size:
        data += client_socket.recv(4*1024)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)
    cv2.imshow("RECEIVING VIDEO",frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
client_socket.close()

Categories