OpenCV live stream video over socket in Python 3 - python

I am trying to create a simple application to send a live video stream over a socket in Python 3 with OpenCV. I am new to OpenCV and socket programming, so if you can provide an answer in detail I will be very grateful. Thank you.
Here is sender.py
import socket
import time
import cv2
capture = cv2.VideoCapture(0)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('192.168.1.10', 50505))
while True:
    ret, frame = capture.read()
    data = cv2.imencode('.jpg', frame)[1].tostring()
    sock.sendall(data)
    time.sleep(2)
Here is receiver.py
import socket
import cv2
import numpy as np
import time
HOST = '192.168.1.10'
PORT = 50505
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')
conn, addr = s.accept()
while True:
    data = conn.recv(8192)
    nparr = np.fromstring(data, np.uint8)
    frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    cv2.imshow('frame', frame)
    time.sleep(2)
and this is the error
receiver.py", line 29, in <module>
cv2.imshow('frame', frame)
cv2.error: D:\Build\OpenCV\opencv-3.4.0\modules\highgui\src\window.cpp:339:
error: (-215) size.width>0 && size.height>0 in function cv::imshow

I'm the author of the VidGear video processing Python library, which now also provides the NetGear API, designed exclusively to transfer video frames synchronously between interconnected systems over the network in real time. You can try it as follows:
A. Server End:(Bare-Minimum example)
Open your favorite terminal and execute the following python code:
Note: You can end streaming anytime on both server and client side by pressing [Ctrl+c] on your keyboard on server end!
# import libraries
from vidgear.gears import VideoGear
from vidgear.gears import NetGear
stream = VideoGear(source='test.mp4').start() #Open any video stream
server = NetGear() #Define netgear server with default settings
# infinite loop until [Ctrl+C] is pressed
while True:
    try:
        # read frames
        frame = stream.read()
        # check if frame is None
        if frame is None:
            # if True break the infinite loop
            break
        # do something with frame here
        # send frame over the network
        server.send(frame)
    except KeyboardInterrupt:
        # break the infinite loop
        break
# safely close video stream
stream.stop()
# safely close server
server.close()
B. Client End:(Bare-Minimum example)
Then open another terminal on the same system and execute the following python code and see the output:
# import libraries
from vidgear.gears import NetGear
import cv2
#define netgear client with `receive_mode = True` and default settings
client = NetGear(receive_mode = True)
# infinite loop
while True:
    # receive frames from network
    frame = client.recv()
    # check if frame is None
    if frame is None:
        # if True break the infinite loop
        break
    # do something with frame here
    # Show output window
    cv2.imshow("Output Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    # check for 'q' key-press
    if key == ord("q"):
        # if 'q' key-pressed break out
        break
# close output window
cv2.destroyAllWindows()
# safely close client
client.close()
NetGear currently supports two ZeroMQ messaging patterns, zmq.PAIR and zmq.REQ/zmq.REP, and the supported transport protocols are 'tcp' and 'ipc'.
More advanced usage can be found here: https://abhitronix.github.io/vidgear/latest/gears/netgear/overview/
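For reference, a hedged sketch of how those options might be selected when the two machines are on a LAN. The parameter names below (address, port, protocol, pattern, receive_mode) are assumptions taken from the NetGear docs linked above, so check the overview for the exact API of your installed version:

# assumption: NetGear accepts address/port/protocol/pattern keyword options (see linked docs)
from vidgear.gears import NetGear

# sending end
server = NetGear(address='192.168.1.10', port='5454', protocol='tcp', pattern=1)  # pattern 1 = zmq.REQ/zmq.REP per the docs

# receiving end (on the other machine)
client = NetGear(address='192.168.1.10', port='5454', protocol='tcp', pattern=1, receive_mode=True)

Which machine's IP goes into address depends on the chosen pattern; the linked overview covers that.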

It is because you are receiving only a small amount of data, so the image is not complete. 8192 bytes is almost never enough, because virtually every encoded frame is larger than 8 KB. You'll need to grab ALL the data sent by the sender in order to convert it to an image.
You can take a look at my code on GitHub and change it according to your needs.
Long story short, the easy option is to first send the number of bytes to the client, and then send the image itself. In the client code, after receiving the length of the image, loop until all bytes are received.
for example:
...
img_len = 175428  # length received from sender.py
e = 0
data = b''
while e < img_len:
    d = sock.recv(1024)
    e += len(d)
    data += d
nparr = np.frombuffer(data, np.uint8)  # np.fromstring is deprecated
frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
cv2.imshow('frame', frame)
cv2.waitKey(1)  # imshow needs waitKey to actually draw the window
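For completeness, a minimal sketch of the matching sender side under the same idea (JPEG-encode the frame, send its length first, then the bytes). The fixed 8-byte struct "Q" header is an assumption borrowed from the struct-based answers further down, not part of this answer:

import socket
import struct

import cv2

capture = cv2.VideoCapture(0)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('192.168.1.10', 50505))

while True:
    ret, frame = capture.read()
    if not ret:
        break
    data = cv2.imencode('.jpg', frame)[1].tobytes()
    sock.sendall(struct.pack("Q", len(data)))  # send the length header first
    sock.sendall(data)                         # then the JPEG bytes themselves

On the receiving side, img_len would then come from struct.unpack("Q", sock.recv(8))[0] instead of being hard-coded.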

Late answer, but for those looking for live video transmission and reception over a socket:
server.py
import socket, cv2, pickle,struct
# Socket Create
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:',host_ip)
port = 9999
socket_address = (host_ip,port)
# Socket Bind
server_socket.bind(socket_address)
# Socket Listen
server_socket.listen(5)
print("LISTENING AT:",socket_address)
# Socket Accept
while True:
    client_socket, addr = server_socket.accept()
    print('GOT CONNECTION FROM:', addr)
    if client_socket:
        vid = cv2.VideoCapture(0)
        while vid.isOpened():
            img, frame = vid.read()
            a = pickle.dumps(frame)
            message = struct.pack("Q", len(a)) + a
            client_socket.sendall(message)
            cv2.imshow('TRANSMITTING VIDEO', frame)
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q'):
                client_socket.close()
                break
client.py
import socket,cv2, pickle,struct
# create socket
client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
host_ip = '192.168.1.20' # paste your server ip address here
port = 9999
client_socket.connect((host_ip,port)) # a tuple
data = b""
payload_size = struct.calcsize("Q")
while True:
    while len(data) < payload_size:
        packet = client_socket.recv(4*1024)  # 4K
        if not packet:
            break
        data += packet
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("Q", packed_msg_size)[0]
    while len(data) < msg_size:
        data += client_socket.recv(4*1024)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)
    cv2.imshow("RECEIVING VIDEO", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
client_socket.close()

Related

Send img from pi using python sockets to display in real time using cv2 on windows

I'm trying to send images from my Pi to Windows so I can use YOLO on Windows, then send code back to my Pi to control my robot.
I can't display the pictures sent by the Pi on my PC in real time because the number of bytes is different every time, and that messes up OpenCV (it opens half a photo).
How would my code on Windows know how many bytes are going to be in the photo sent from the Pi? Or is there another way to go about this?
Pi code:
from picamera.array import PiRGBArray
from picamera import PiCamera
import socket
import time
import cv2
listensocket = socket.socket() #Creates an instance of socket
Port = 8000 #Port to host server on
maxConnections = 999
IP = socket.gethostname() #IP address of local machine
listensocket.bind(('',Port))
# Starts server
listensocket.listen(maxConnections)
print("Server started at " + IP + " on port " + str(Port))
# Accepts the incoming connection
(clientsocket, address) = listensocket.accept()
print("New connection made!")
# Initialize the camera
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 20
raw_capture = PiRGBArray(camera, size=(640, 480))
time.sleep(0.1)
# Capture frames continuously from the camera
for frame in camera.capture_continuous(raw_capture, format="bgr", use_video_port=True):
    image = frame.array
    #cv2.imshow("Frame", image)
    # Wait for keyPress for 1 millisecond
    key = cv2.waitKey(1) & 0xFF
    cv2.imwrite("image.jpg", image)
    file = open('image.jpg', 'rb')
    file_data = file.read(56000)
    clientsocket.send(file_data)
    print("Data has been transmitted successfully")
    raw_capture.truncate(0)
    time.sleep(0.050)
    if key == ord("q"):
        camera.close()
        cv2.destroyAllWindows()
        break
camera.close()
cv2.destroyAllWindows()
Windows code:
import socket
#import time
import cv2
#import sys
s = socket.socket()
hostname = 'raspi' #Server IP/Hostname
port = 8000 #Server Port
s.connect((hostname, port)) #Connects to server
file = open('image.jpg', 'wb')
file_data = s.recv(2048)
cap = cv2.VideoCapture('image.jpg')
while file_data:
    # Save recent image from server
    file.write(file_data)
    file_data = s.recv(56000)
    print("File has been received successfully.")
    # Display image as video
    ret, img = cap.read()
    # Show image
    cv2.imshow("Image", img)
    key = cv2.waitKey(50)
    if key == 27:
        break
file.close()
cv2.destroyAllWindows()
I don't have a Pi to test with the exact setup, but I did a similar project a while ago. My approach to this problem is to use a null-terminated header that tells the client how many bytes the image will be. Alternatively, you can use a constant-size header agreed on beforehand; for example, the first 4 bytes tell the client how many bytes to receive (a sketch of that fixed-size variant appears after the receiver code below). Here is a sample code snippet for the first solution I described:
sender:
import socket
import cv2 as cv
# read a test image
img = cv.imread('panda.jpg')
# encode it to jpg format, you can do this without redundant file openings
retval, buf = cv.imencode(".JPEG", img)
# get number of bytes
number_of_bytes = len(buf)
# create a null terminated string
header = "" + str(number_of_bytes) + "\0"
# encode it to utf-8 byte format
raw_header = bytes(header, "utf-8")
# create server socket
sock = socket.socket()
sock.bind(('localhost', 8000))
sock.listen()
conn, addr = sock.accept()
# send the header first, the receiver will use it to receive the image
conn.send(raw_header)
# send the rest of image
conn.send(buf)
receiver:
import socket

import cv2 as cv
import numpy as np

# create client socket
sock = socket.socket()
sock.connect(('localhost', 8000))
# receive bytes until null termination
raw_header = []
recv_byte = sock.recv(1)
while recv_byte != b"\0":
    raw_header.append(recv_byte)
    recv_byte = sock.recv(1)
# decode header
header = str(b''.join(raw_header), "utf-8")
# receive the amount of bytes foretold by the header
# (note: a single recv may return fewer bytes; looping as in the other answers is more robust)
recv_img = sock.recv(int(header))
# save image to file, or you can use cv.imdecode to turn it back into a numpy.ndarray (cv2 image format)
with open("traveller_panda.jpg", 'wb+') as im_file:
    im_file.write(recv_img)
# transform back from jpg to numpy array
image_decoded = np.frombuffer(recv_img, dtype=np.uint8)
image_decoded = cv.imdecode(image_decoded, cv.IMREAD_COLOR)
# display image
cv.imshow("received", image_decoded)
cv.waitKey()
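And for the constant-size-header alternative mentioned above, a minimal sketch with hypothetical helper names (send_image/recv_image, not from the original answer), using a 4-byte big-endian length prefix:

import struct

def send_image(conn, payload):
    # prefix the JPEG bytes with a fixed 4-byte length header ("!I"), then the data
    conn.sendall(struct.pack("!I", len(payload)) + payload)

def recv_image(conn):
    # read exactly 4 bytes of header
    raw_header = b""
    while len(raw_header) < 4:
        chunk = conn.recv(4 - len(raw_header))
        if not chunk:
            raise ConnectionError("socket closed while reading header")
        raw_header += chunk
    length = struct.unpack("!I", raw_header)[0]
    # then read exactly `length` bytes of image data
    data = b""
    while len(data) < length:
        chunk = conn.recv(min(4096, length - len(data)))
        if not chunk:
            raise ConnectionError("socket closed while reading image")
        data += chunk
    return data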

How to send multiple continuous values (i.e. video frames, signals) over socket programming using python

I am a newbie at Python socket programming. I am trying to send video frames and two lists over the socket, but the problem is that only one of the items is sent and the other one gets blocked. How can I send these two types of data simultaneously? Thank you for your help.
Server-side code:
import socket
import cv2, pickle,struct
import time
capture = cv2.VideoCapture(0)
port=5050
host=socket.gethostbyname(socket.gethostname())
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((host,port))
s.listen(5)
while True:
    # now our endpoint knows about the OTHER endpoint.
    conn, add = s.accept()
    print(f"Connection from {add} has been established.")
    msg = conn.recv(1024)
    print(msg)
    if conn:
        vid = cv2.VideoCapture(0)
        X = [10, 20, 30]
        Y = []
        for i in range(len(X)):
            y = X[i] * 10
            Y.append(y)
        print(Y)
        X_data = pickle.dumps(X)
        Y_data = pickle.dumps(Y)
        conn.send(X_data)
        conn.send(Y_data)
        while vid.isOpened():
            img, frame = vid.read()
            a = pickle.dumps(frame)
            message = struct.pack("Q", len(a)) + a
            conn.sendall(message)
            #cv2.imshow('TRANSMITTING VIDEO',frame)
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q'):
                conn.close()
Client-side code (I haven't tried to use the lists in the client code, but if I send the lists first then the video portion does not work):
import socket,pickle,struct
import numpy as np
import cv2
import time
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
port=5050
host=socket.gethostbyname(socket.gethostname())
s.connect((host,port))
data=b""
payload_size = struct.calcsize("Q")
while True:
    msg = bytes('Khan', "utf-8")
    s.sendall(msg)
    while len(data) < payload_size:
        packet = s.recv(4*1024)  # 4K
        if not packet:
            break
        data += packet
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("Q", packed_msg_size)[0]
    while len(data) < msg_size:
        data += s.recv(4*1024)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)
    cv2.imshow("RECEIVING VIDEO", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
s.close()
In my experience, you need to send one item and then clear/flush the socket buffer on the server-side before you send the next item.
Do this in a while loop and you can do it continuously forever.
I did it like this:
import socket

def clear_buffer(sock):
    try:
        while sock.recv(1024):
            pass
    except:
        pass

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("0.0.0.0", 3389))
s.listen(10)
c, a = s.accept()
c.settimeout(1.0)
clear_buffer(c)
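Another option, consistent with the struct-based framing used elsewhere in this thread, is to length-prefix every pickled payload (the lists and each frame) so the client can tell them apart without flushing anything. A minimal sketch with hypothetical helper names (send_msg/recv_msg), not the poster's code:

# every pickled object (list or frame) is sent with an 8-byte struct "Q" length
# prefix, so different kinds of data can share one socket
import pickle
import struct

def send_msg(sock, obj):
    payload = pickle.dumps(obj)
    sock.sendall(struct.pack("Q", len(payload)) + payload)

def recv_exact(sock, n):
    data = b""
    while len(data) < n:
        chunk = sock.recv(min(4096, n - len(data)))
        if not chunk:
            raise ConnectionError("socket closed")
        data += chunk
    return data

def recv_msg(sock):
    (length,) = struct.unpack("Q", recv_exact(sock, struct.calcsize("Q")))
    return pickle.loads(recv_exact(sock, length))

# server side could then do: send_msg(conn, X); send_msg(conn, Y); send_msg(conn, frame)
# client side:               X = recv_msg(s); Y = recv_msg(s); frame = recv_msg(s)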

I am running a program to send a picture over two sockets and I get no errors, but the program gets stuck

I created a server and a client in Python so I could send an image across two sockets. When I run the client, it receives the data, but once it reaches the end of the file it gets stuck. It raises no error and doesn't crash; the terminal is just stuck there.
I have tried changing the code a bit, to no avail. I am still a beginner.
client.py
import socket
import cv2
import numpy as np
import pickle
client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
client_socket.bind(('127.0.0.1',5000))
host_ip = ('127.0.0.1',400)
client_socket.connect(host_ip)
serialized_img = b""
while True:
    packet = client_socket.recv(1024)
    if not packet:
        break
    serialized_img += packet
image = pickle.loads(serialized_img)
cv2.imshow("a", image)
server.py
import socket
import cv2
import numpy as np
import pickle
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
server_socket.bind(('127.0.0.1',400))
cap = cv2.VideoCapture(0)
ret,img = cap.read()
cv2.imshow('image',img)
cv2.waitKey(0)
cap.release()
cv2.destroyAllWindows()
serialized_img = pickle.dumps(img)
print(serialized_img)
while ret:
    try:
        server_socket.listen()
        client_socket, client_address = server_socket.accept()
        print(client_address)
        client_socket.sendall(serialized_img)
    except socket.timeout:
        print("time out")
server_socket.close()
I want the client side to be able to show the image.
Close client_socket in the server to inform the client that it has reached the end of the data.
client_socket.sendall(serialized_img)
client_socket.close()
In the client you have to wait for a key press to keep the window open.
cv2.imshow("a", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
Server:
import socket
import cv2
import pickle
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('127.0.0.1', 4000))
cap = cv2.VideoCapture(0)
ret, img = cap.read()
cap.release()
cv2.imshow("server", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
serialized_img = pickle.dumps(img)
while ret:
    try:
        server_socket.listen()
        client_socket, client_address = server_socket.accept()
        print(client_address)
        client_socket.sendall(serialized_img)
        client_socket.close()
        print('closed')
    except socket.timeout:
        print("time out")
server_socket.close()
Client:
import socket
import cv2
import pickle
client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 4000))
serialized_img = b""
while True:
    packet = client_socket.recv(1024)
    if not packet:
        break
    serialized_img += packet
image = pickle.loads(serialized_img)
cv2.imshow("client", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
If you want to send live video, then the server would have to run a separate thread for the camera (or the while ret loop), and every client_socket would be handled in a separate thread with its own while True loop (a minimal sketch of that per-client-thread idea follows right below, before the EDIT). The problem is how to inform the client where one frame ends and the next frame begins; you can't use close() for this.
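A hedged sketch of that idea, with hypothetical names, using the struct "Q" length prefix from the earlier answers (the EDIT below pickles the length instead):

# one thread keeps the latest camera frame; each accepted client is served in its own thread
import pickle
import socket
import struct
import threading
import time

import cv2

latest_frame = None
frame_lock = threading.Lock()

def camera_loop():
    # keep only the newest frame; client threads read it under the lock
    global latest_frame
    cap = cv2.VideoCapture(0)
    while cap.isOpened():
        ret, frame = cap.read()
        if ret:
            with frame_lock:
                latest_frame = frame
    cap.release()

def handle_client(client_socket):
    # send length-prefixed pickled frames until the client disconnects
    try:
        while True:
            with frame_lock:
                frame = latest_frame
            if frame is None:
                time.sleep(0.01)  # camera not ready yet
                continue
            payload = pickle.dumps(frame)
            client_socket.sendall(struct.pack("Q", len(payload)) + payload)
    except OSError:
        pass  # client closed the connection
    finally:
        client_socket.close()

threading.Thread(target=camera_loop, daemon=True).start()

server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('127.0.0.1', 4000))
server_socket.listen()
while True:
    client_socket, client_address = server_socket.accept()
    print(client_address)
    threading.Thread(target=handle_client, args=(client_socket,), daemon=True).start()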
EDIT: this code streams images from the camera so the client can see them live, with a small delay.
It sends the image's size before the image, so the client knows how many bytes to receive to get the full image. The pickled integer is always 8 bytes for sizes in this range, so I always receive 8 bytes before the image.
I use cv2.waitKey(10) in the client not only to check for a key press to close the window, but also because it didn't display the image without it. The window probably has to receive events from the system to work correctly (and refresh), like in other modules such as PyGame, and waitKey() processes those events.
Server:
import socket
import cv2
import pickle
server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('127.0.0.1', 4000))
cap = cv2.VideoCapture(0)
while True:
    try:
        server_socket.listen()
        print('waiting ...')
        client_socket, client_address = server_socket.accept()
        print(client_address)
        while True:
            try:
                ret, img = cap.read()
                serialized_img = pickle.dumps(img)
                print('serialized_len:', len(serialized_img))
                serialized_len = pickle.dumps(len(serialized_img))
                #print('len(serialized_len):', len(serialized_len)) # always length 8
                client_socket.sendall(serialized_len)  # always length 8
                client_socket.sendall(serialized_img)
            except Exception as ex:
                print(ex)
                # exit loop on error, i.e. when the client closes the connection
                break
        client_socket.close()
        print('closed')
    except socket.timeout:
        print('time out')
cap.release()
server_socket.close()
Client:
import socket
import cv2
import pickle
client_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 4000))
cv2.namedWindow('client')
while True:
    serialized_image = b""
    serialized_len = client_socket.recv(8)  # always length 8
    length = pickle.loads(serialized_len)
    #print('length:', length)
    while length > 0:
        if length < 1024:
            packet = client_socket.recv(length)
        else:
            packet = client_socket.recv(1024)
        if not packet:
            print('error: no data')
            break
        serialized_image += packet
        length -= len(packet)
    #print('received:', len(serialized_image))
    image = pickle.loads(serialized_image)
    cv2.imshow('client', image)
    # needed to display the image (the window has to receive events from the system);
    # `waitKey` waits 10 ms so it doesn't block the loop
    key = cv2.waitKey(10) & 0xFF
    if key == 27:
        break
cv2.destroyAllWindows()

Video chat with opencv via tcp

I'm trying to send video captured by a webcam from a client to a server, which should display the video.
Unfortunately, my server gets the data (I think) but doesn't display it correctly, and I don't understand why. I only get a small, grey window.
The client:
import numpy as np
import cv2, time
import speech_recognition as sr
from threading import Thread
import socket
import pickle
host = "localhost"
port = 8888
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.connect((host, port))
print("Connextion established on port {}".format(port))
cap = cv2.VideoCapture(0)
# while(True):
# Capture frame-by-frame
while 1:
    ret, frame = cap.read()
    ret = str(ret).encode()
    connection.send(ret)
    frame = pickle.dumps(frame)
    connection.send(frame)
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
And the server
#cd C:\Users\Clement\Desktop\pychat\example
import numpy as np
import cv2
import socket
host = ''
port = 8888
connexion_principale = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connexion_principale.bind((host, port))
connexion_principale.listen(5)
print("The server is listening on port {}".format(port))
connexion_avec_client, infos_connexion = connexion_principale.accept()
cap = cv2.VideoCapture("localhost") #Maybe there is a problem here
msg_recu = b""
error = 0
while 1:
    msg_recu1 = connexion_avec_client.recv(1024)
    msg_recu2 = connexion_avec_client.recv(1024)
    try:
        if msg_recu1 != b"":
            # Capture frame-by-frame
            ret, frame = bool(msg_recu1.decode()), np.frombuffer(msg_recu2)
            # Display the resulting frame
            cv2.imshow('frame', frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    except:
        error += 1
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
print("Connection is now closed")
connexion_avec_client.close()
connexion_principale.close()

Sending live video frame over network in python opencv

I'm trying to send live video frames that I capture with my camera to a server and process them. I'm using OpenCV for image processing and Python for the language. Here is my code:
client_cv.py
import cv2
import numpy as np
import socket
import sys
import pickle
cap=cv2.VideoCapture(0)
clientsocket=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
clientsocket.connect(('localhost',8089))
while True:
    ret, frame = cap.read()
    print sys.getsizeof(frame)
    print frame
    clientsocket.send(pickle.dumps(frame))
server_cv.py
import socket
import sys
import cv2
import pickle
import numpy as np
HOST=''
PORT=8089
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print 'Socket created'
s.bind((HOST,PORT))
print 'Socket bind complete'
s.listen(10)
print 'Socket now listening'
conn,addr=s.accept()
while True:
    data = conn.recv(80)
    print sys.getsizeof(data)
    frame = pickle.loads(data)
    print frame
    cv2.imshow('frame', frame)
This code gives me an end-of-file error, which is logical because the data keeps coming to the server and pickle doesn't know when to finish. My search on the internet led me to use pickle, but it hasn't worked so far.
Note: I set conn.recv to 80 because that's the number I get when I print sys.getsizeof(frame).
A few things:
use sendall instead of send, since you're not guaranteed everything will be sent in one go
pickle is OK for data serialization, but you have to make a protocol of your own for the messages you exchange between the client and the server; this way you can know in advance the amount of data to read for unpickling (see below)
for recv you will get better performance if you receive big chunks, so replace 80 by 4096 or even more
beware of sys.getsizeof: it returns the size of the object in memory, which is not the same as the size (length) of the bytes to send over the network; for a Python string the two values are not the same at all
be mindful of the size of the frame you are sending. The code below supports a frame up to 65535 bytes. Change "H" to "L" if you have a larger frame.
A protocol example:
client_cv.py
import cv2
import numpy as np
import socket
import sys
import pickle
import struct ### new code
cap=cv2.VideoCapture(0)
clientsocket=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
clientsocket.connect(('localhost',8089))
while True:
    ret, frame = cap.read()
    data = pickle.dumps(frame)  ### new code
    clientsocket.sendall(struct.pack("H", len(data)) + data)  ### new code
server_cv.py
import socket
import sys
import cv2
import pickle
import numpy as np
import struct ## new
HOST=''
PORT=8089
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST,PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')
conn,addr=s.accept()
### new
data = ""
payload_size = struct.calcsize("H")
while True:
    while len(data) < payload_size:
        data += conn.recv(4096)
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("H", packed_msg_size)[0]
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    ###
    frame = pickle.loads(frame_data)
    print frame
    cv2.imshow('frame', frame)
You can probably optimize all of this a lot (less copying, using the buffer interface, etc.), but at least you get the idea.
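As one concrete instance of the buffer-interface hint, a hedged sketch (Python 3, not part of the original answer) of a helper that reads an exact number of bytes with socket.recv_into into a preallocated buffer, instead of repeatedly concatenating with data += conn.recv(4096):

import socket

def recv_exactly(conn, n):
    # read exactly n bytes into a preallocated buffer, avoiding repeated copies
    buf = bytearray(n)
    view = memoryview(buf)
    received = 0
    while received < n:
        nbytes = conn.recv_into(view[received:], n - received)
        if nbytes == 0:
            raise ConnectionError("socket closed before the full message arrived")
        received += nbytes
    return view

pickle.loads accepts any bytes-like object, so the returned memoryview can be unpickled without an extra copy.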
After months of searching the internet, this is what I came up with. I have neatly packaged it into classes, with unit tests and documentation, as SmoothStream; check it out, it was the only simple and working version of streaming I could find anywhere.
I used this code and wrapped mine around it.
Viewer.py
import cv2
import zmq
import base64
import numpy as np
context = zmq.Context()
footage_socket = context.socket(zmq.SUB)
footage_socket.bind('tcp://*:5555')
footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # np.unicode('') no longer exists in newer NumPy; a plain '' works
while True:
    try:
        frame = footage_socket.recv_string()
        img = base64.b64decode(frame)
        npimg = np.frombuffer(img, dtype=np.uint8)  # np.fromstring is deprecated
        source = cv2.imdecode(npimg, 1)
        cv2.imshow("Stream", source)
        cv2.waitKey(1)
    except KeyboardInterrupt:
        cv2.destroyAllWindows()
        break
Streamer.py
import base64
import cv2
import zmq
context = zmq.Context()
footage_socket = context.socket(zmq.PUB)
footage_socket.connect('tcp://localhost:5555')
camera = cv2.VideoCapture(0) # init the camera
while True:
    try:
        grabbed, frame = camera.read()  # grab the current frame
        frame = cv2.resize(frame, (640, 480))  # resize the frame
        encoded, buffer = cv2.imencode('.jpg', frame)
        jpg_as_text = base64.b64encode(buffer)
        footage_socket.send(jpg_as_text)
    except KeyboardInterrupt:
        camera.release()
        cv2.destroyAllWindows()
        break
I changed the code from #mguijarr to work with Python 3. Changes made to the code:
data is now a byte literal instead of a string literal
Changed "H" to "L" to send larger frame sizes. Based on the documentation, we can now send frames of size 2^32 instead of just 2^16.
Server.py
import pickle
import socket
import struct
import cv2
HOST = ''
PORT = 8089
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')
conn, addr = s.accept()
data = b'' ### CHANGED
payload_size = struct.calcsize("L") ### CHANGED
while True:
    # Retrieve message size
    while len(data) < payload_size:
        data += conn.recv(4096)
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("L", packed_msg_size)[0]  ### CHANGED
    # Retrieve all data based on message size
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    # Extract frame
    frame = pickle.loads(frame_data)
    # Display
    cv2.imshow('frame', frame)
    cv2.waitKey(1)
Client.py
import cv2
import numpy as np
import socket
import sys
import pickle
import struct
cap=cv2.VideoCapture(0)
clientsocket=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
clientsocket.connect(('localhost',8089))
while True:
    ret, frame = cap.read()
    # Serialize frame
    data = pickle.dumps(frame)
    # Send message length first
    message_size = struct.pack("L", len(data))  ### CHANGED
    # Then data
    clientsocket.sendall(message_size + data)
As #Rohan Sawant said, I used the zmq library, but without the base64 encoding. Here is the new code:
Streamer.py
import base64
import cv2
import zmq
import numpy as np
import time
context = zmq.Context()
footage_socket = context.socket(zmq.PUB)
footage_socket.connect('tcp://192.168.1.3:5555')
camera = cv2.VideoCapture(0) # init the camera
while True:
    try:
        grabbed, frame = camera.read()  # grab the current frame
        frame = cv2.resize(frame, (640, 480))  # resize the frame
        encoded, buffer = cv2.imencode('.jpg', frame)
        footage_socket.send(buffer)
    except KeyboardInterrupt:
        camera.release()
        cv2.destroyAllWindows()
        break
Viewer.py
import cv2
import zmq
import base64
import numpy as np
context = zmq.Context()
footage_socket = context.socket(zmq.SUB)
footage_socket.bind('tcp://*:5555')
footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # np.unicode('') no longer exists in newer NumPy; a plain '' works
while True:
    try:
        frame = footage_socket.recv()
        npimg = np.frombuffer(frame, dtype=np.uint8)
        #npimg = npimg.reshape(480,640,3)
        source = cv2.imdecode(npimg, 1)
        cv2.imshow("Stream", source)
        cv2.waitKey(1)
    except KeyboardInterrupt:
        cv2.destroyAllWindows()
        break
I'm kind of late, but my powerful & threaded VidGear video processing Python library now provides the NetGear API, which is designed exclusively to transfer video frames synchronously between interconnected systems over the network in real time. Here's an example:
A. Server End:(Bare-Minimum example)
Open your favorite terminal and execute the following python code:
Note: You can end streaming anytime on both server and client side by pressing [Ctrl+c] on your keyboard on server end!
# import libraries
from vidgear.gears import VideoGear
from vidgear.gears import NetGear
stream = VideoGear(source='test.mp4').start() #Open any video stream
server = NetGear() #Define netgear server with default settings
# infinite loop until [Ctrl+C] is pressed
while True:
    try:
        # read frames
        frame = stream.read()
        # check if frame is None
        if frame is None:
            # if True break the infinite loop
            break
        # do something with frame here
        # send frame over the network
        server.send(frame)
    except KeyboardInterrupt:
        # break the infinite loop
        break
# safely close video stream
stream.stop()
# safely close server
server.close()
B. Client End:(Bare-Minimum example)
Then open another terminal on the same system and execute the following python code and see the output:
# import libraries
from vidgear.gears import NetGear
import cv2
#define netgear client with `receive_mode = True` and default settings
client = NetGear(receive_mode = True)
# infinite loop
while True:
    # receive frames from network
    frame = client.recv()
    # check if frame is None
    if frame is None:
        # if True break the infinite loop
        break
    # do something with frame here
    # Show output window
    cv2.imshow("Output Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    # check for 'q' key-press
    if key == ord("q"):
        # if 'q' key-pressed break out
        break
# close output window
cv2.destroyAllWindows()
# safely close client
client.close()
More advanced usage and related docs can be found here: https://github.com/abhiTronix/vidgear/wiki/NetGear
Recently I published the imagiz package for fast and non-blocking live video streaming over the network with OpenCV and ZMQ.
https://pypi.org/project/imagiz/
Client :
import imagiz
import cv2
client=imagiz.Client("cc1",server_ip="localhost")
vid=cv2.VideoCapture(0)
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
while True:
    r, frame = vid.read()
    if r:
        r, image = cv2.imencode('.jpg', frame, encode_param)
        client.send(image)
    else:
        break
Server :
import imagiz
import cv2
server=imagiz.Server()
while True:
    message = server.recive()
    frame = cv2.imdecode(message.image, 1)
    cv2.imshow("", frame)
    cv2.waitKey(1)
I have made it work on my macOS machine.
I used the code from #mguijarr and changed the struct.pack format from "H" to "L".
# Server.py:
import socket
import sys
import cv2
import pickle
import numpy as np
import struct ## new
HOST=''
PORT=8089
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print 'Socket created'
s.bind((HOST,PORT))
print 'Socket bind complete'
s.listen(10)
print 'Socket now listening'
conn,addr=s.accept()
# new
data = ""
payload_size = struct.calcsize("L")
while True:
    while len(data) < payload_size:
        data += conn.recv(4096)
    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("L", packed_msg_size)[0]
    while len(data) < msg_size:
        data += conn.recv(4096)
    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)
    print frame
    cv2.imshow('frame', frame)
    key = cv2.waitKey(10)
    if (key == 27) or (key == 113):
        break
cv2.destroyAllWindows()
# Client.py
import cv2
import numpy as np
import socket
import sys
import pickle
import struct ### new code
cap=cv2.VideoCapture(0)
clientsocket=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
clientsocket.connect(('localhost',8089))
while True:
    ret, frame = cap.read()
    data = pickle.dumps(frame)  ### new code
    clientsocket.sendall(struct.pack("L", len(data)) + data)  ### new code
