How to add metadata to a UDP stream in Python

I have a client-server application in Python that relays a UDP stream from one client to another. The sender reads a video from disk with OpenCV (cv2), encodes each frame, and sends it over UDP.
I want to add some metadata along with the video stream: the frame number, a timestamp, the video name, the playback time, and the start time.
How can I do this? I have created multiple UDP sockets, which are managed using threads.
The following is the code for the sender:
import cv2
import socket
import math
import pickle
import sys

max_length = 65000
host = '127.0.0.1'
port = 6000

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

cap = cv2.VideoCapture("input2.mp4")
print(cap)

ret, frame = cap.read()
print(ret)

while ret:
    # compress frame
    retval, buffer = cv2.imencode(".jpg", frame)

    if retval:
        # convert to byte array
        buffer = buffer.tobytes()

        # get size of the frame
        buffer_size = len(buffer)
        print(buffer_size)

        num_of_packs = 1
        if buffer_size > max_length:
            num_of_packs = math.ceil(buffer_size / max_length)

        frame_info = {"packs": num_of_packs}

        # send the number of packs to be expected
        print("Number of packs:", num_of_packs)
        sock.sendto(pickle.dumps(frame_info), (host, port))

        left = 0
        right = max_length

        for i in range(num_of_packs):
            print("left:", left)
            print("right:", right)

            # truncate data to send
            data = buffer[left:right]
            left = right
            right += max_length

            # send the frames accordingly
            sock.sendto(data, (host, port))

    ret, frame = cap.read()

print("done")
The following is the code for the server:
from threading import Thread
import cv2
import socket
import math
import pickle
import sys
import numpy as np
import json

def sendToClient(threadNum, hostRec, portRec, hostSend, portSend):
    max_length = 65540
    sockRec = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sockRec.bind((hostRec, portRec))
    sockSend = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    while True:
        data, address = sockRec.recvfrom(max_length)
        print("Received data on " + str(threadNum))
        sockSend.sendto(data, (hostSend, portSend))

def main():
    host = '0.0.0.0'
    ports = [6000, 6001, 6002]

    thread1 = Thread(name='Thread-1', target=sendToClient, args=('thread-1', host, ports[0], '127.0.0.1', 7000))
    thread2 = Thread(name='Thread-2', target=sendToClient, args=('thread-2', host, ports[1], '127.0.0.1', 7001))
    thread3 = Thread(name='Thread-3', target=sendToClient, args=('thread-3', host, ports[2], '127.0.0.1', 7002))

    thread1.daemon = True
    thread2.daemon = True
    thread3.daemon = True

    thread1.start()
    thread2.start()
    thread3.start()

    thread1.join()
    thread2.join()
    thread3.join()

main()
The following is the code for the receiver:
from concurrent.futures import thread
import cv2
import socket
import pickle
import numpy as np
from threading import Thread

def display(threadNum, host, port, videoPlayerInfo):
    max_length = 65540

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((host, port))

    frame_info = None
    buffer = None
    frame = None

    print("-> waiting for connection on thread " + str(threadNum))

    while True:
        data, address = sock.recvfrom(max_length)
        print("Received Data from " + str(threadNum))

        if len(data) < 100:
            frame_info = pickle.loads(data)

            if frame_info:
                nums_of_packs = frame_info["packs"]

                for i in range(nums_of_packs):
                    data, address = sock.recvfrom(max_length)

                    if i == 0:
                        buffer = data
                    else:
                        buffer += data

                frame = np.frombuffer(buffer, dtype=np.uint8)
                frame = frame.reshape(frame.shape[0], 1)

                frame = cv2.imdecode(frame, cv2.IMREAD_COLOR)
                frame = cv2.flip(frame, 1)

                if frame is not None and type(frame) == np.ndarray:
                    cv2.imshow(videoPlayerInfo, frame)
                    if cv2.waitKey(1) == 27:
                        break

    print("goodbye from " + str(threadNum))

def main():
    host = "0.0.0.0"
    ports = [7000, 7001, 7002]

    thread1 = Thread(name='Thread-1', target=display, args=('thread-1', host, ports[0], 'player1'))
    thread2 = Thread(name='Thread-2', target=display, args=('thread-2', host, ports[1], 'player2'))
    thread3 = Thread(name='Thread-3', target=display, args=('thread-3', host, ports[2], 'player3'))

    thread1.daemon = True
    thread2.daemon = True
    thread3.daemon = True

    thread1.start()
    thread2.start()
    thread3.start()

    thread1.join()
    thread2.join()
    thread3.join()

main()
Please help me with how I can send the metadata, as I am new to socket programming in Python.
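One way to attach the metadata, sticking with the pickle-based frame_info header the sender already transmits before each frame, is to put the extra fields into that dictionary and read them back on the receiver before collecting the packs. This is only a sketch under that assumption; frame_number and start_time are placeholders that the sender loop would have to maintain itself:

    import time

    # sender side: extend the per-frame header that is already pickled and sent first
    frame_number += 1                      # hypothetical counter kept by the sender loop
    frame_info = {
        "packs": num_of_packs,
        "frame_number": frame_number,
        "timestamp": time.time(),
        "video_name": "input2.mp4",
        "playback_time": cap.get(cv2.CAP_PROP_POS_MSEC) / 1000.0,  # seconds into the video
        "start_time": start_time,          # e.g. time.time() recorded before the loop
    }
    sock.sendto(pickle.dumps(frame_info), (host, port))

    # receiver side: the metadata arrives in the same small header packet
    frame_info = pickle.loads(data)
    nums_of_packs = frame_info["packs"]
    print(frame_info["frame_number"], frame_info["timestamp"], frame_info["video_name"])

Note that the receiver currently distinguishes the header packet from image chunks with if len(data) < 100, so that threshold would need to be raised (or replaced with a more explicit marker) once the header carries these extra fields.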

Related

python socket pickled numpy data not showing up

I'm trying to send python-opencv frames over sockets. I'm pickling the data and unpickling it, but for some reason the output is blank or nothing shows up.
This is my terminal when I run client.py:
new message length: b'720 '
It should be streaming the webcam from the server, but nothing is showing up.
Here is my code for the client and server:
client.py
import socket
import numpy as np
import cv2
import pickle

HEADERSIZE = 10

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((socket.gethostname(), 1232))

while True:
    full_msg = b''
    new_msg = True
    while True:
        msg = s.recv(16)
        if new_msg:
            print(f'new message length: {msg[:HEADERSIZE]}')
            msglen = int(msg[:HEADERSIZE])
            new_msg = False

        full_msg += msg

        if len(full_msg)-HEADERSIZE == msglen:
            print('full msg recvd')
            print(full_msg[HEADERSIZE:])
            d = pickle.loads(full_msg[HEADERSIZE:])
            print(d)
            cv2.namedWindow('Webcam', cv2.WINDOW_NORMAL)
            cv2.imshow('Webcam', full_msg[HEADERSIZE:])

            new_msg = True
            full_msg = b''
    print(full_msg)
server.py
import socket
import numpy as np
import cv2
import time
import pickle
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)

HEADERSIZE = 10

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((socket.gethostname(), 1232))
s.listen(5)

cap = cv2.VideoCapture(0)

while True:
    clientsocket, address = s.accept()
    print(f"Connection from {address} has been established!")
    while True:
        ret, frame = cap.read()
        msg = pickle.dumps(frame)
        print(frame)
        msg = bytes(f'{len(frame):<{HEADERSIZE}}', "utf-8") + msg
        clientsocket.send(msg)
I have no idea why nothing is showing up. I don't even know if anything is coming through. Does it have to do with the NumPy data? I heard that can be tricky.
When the server streams frames, each message is the 10-byte length header followed immediately by the pickled payload, with nothing marking the boundary between one frame and the next.
In the client you read 16 bytes per recv(), so len(full_msg) only grows in steps of 16 and the condition if len(full_msg)-HEADERSIZE == msglen: is never exactly true; once you read past the end of one message you are already into the next one.
So your client loops indefinitely on the inner while True:.
Try the program below for the client. I tested it with dummy data.
client.py
import socket
import numpy as np
import cv2
import pickle

HEADERSIZE = 10

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((socket.gethostname(), 1232))

while True:
    # read the fixed-size header first to learn the payload length
    msg_length = int(s.recv(HEADERSIZE))
    full_msg = b''
    # keep reading until the whole payload has arrived
    while len(full_msg) < msg_length:
        full_msg += s.recv(msg_length - len(full_msg))
    print(full_msg)
    d = pickle.loads(full_msg)
    cv2.namedWindow('Webcam', cv2.WINDOW_NORMAL)
    cv2.imshow('Webcam', d)  # show the unpickled frame, not the raw bytes
    cv2.waitKey(1)           # needed so the window actually refreshes
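Note that the posted server.py prefixes each message with len(frame), which is the number of image rows (720 here), not the length of the pickled payload, so even the corrected client would read too few bytes. A minimal server-side fix, sketched against the posted code:

    payload = pickle.dumps(frame)
    # prefix the byte length of the pickled payload, not len(frame) (the row count)
    msg = bytes(f'{len(payload):<{HEADERSIZE}}', "utf-8") + payload
    clientsocket.sendall(msg)  # sendall avoids silently dropping part of the message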

Send Thermal Image with mlx90640 via TCP

I have a problem with my program: I am trying to send the images I collect from an MLX90640 on a Raspberry Pi to a remote PC for processing.
The Raspberry Pi 4 is the client and the PC is the server, which receives both the regular camera images and the thermal images. The regular camera images already work; my problem is transferring the thermal images. For testing I am using a Wi-Fi connection shared from my cellphone. If necessary I will post the server code, but I get the error message below and none of the solutions I have tried has worked. I want to detect face temperature, so the MLX90640 connected to the Raspberry Pi must send its thermal data. It produces 768 values per frame (24x32), and I want either all of these values or just the maximum value to be sent to the PC. Can someone help me?
import cv2
import io
import socket
import struct
import time
import pickle
import zlib
import adafruit_mlx90640
import board
import busio
import numpy as np

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('192.168.43.134', 8485))
connection = client_socket.makefile('wb')

i2c = busio.I2C(board.SCL, board.SDA, frequency=800000)
mlx = adafruit_mlx90640.MLX90640(i2c)
print("MLX addr detected on I2C")
print([hex(i) for i in mlx.serial_number])
mlx.refresh_rate = adafruit_mlx90640.RefreshRate.REFRESH_4_HZ
frame1 = np.zeros((24*32,))

#max_t=0
#moy = 0
#cam = cv2.VideoCapture(0)
#mlx.set(3, 32);
#mlx.set(4, 24);

img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]

while True:
    frame = mlx.getFrame(frame1)
    result, frame = cv2.imencode('.jpg', frame, encode_param)
    # data = zlib.compress(pickle.dumps(frame, 0))
    data = pickle.dumps(frame, 0)
    size = len(data)

    print("{}: {}".format(img_counter, size))
    client_socket.sendall(struct.pack(">L", size) + data)

    img_counter += 1
Traceback (most recent call last):
  File "client1.py", line 37, in <module>
    result, frame = cv2.imencode('.jpg', frame, encode_param)
cv2.error: OpenCV(4.1.1) /home/pi/opencv/modules/imgcodecs/src/grfmt_base.cpp:145: error: (-10:Unknown error code -10) Raw image encoder error: Empty JPEG image (DNL not supported) in function 'throwOnEror'
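For what it is worth, the traceback points at the value passed to cv2.imencode: in the Adafruit driver, mlx.getFrame(frame1) fills the supplied buffer in place and returns None, and even the filled buffer holds raw float temperatures rather than an 8-bit image. A sketch of converting the 768 readings into something cv2.imencode accepts (my own suggestion, reusing mlx, frame1 and encode_param from the code above):

    # fill the 768-value buffer in place, then shape it as a 24x32 thermal image
    mlx.getFrame(frame1)
    thermal = np.reshape(frame1, (24, 32))
    max_temp = thermal.max()  # e.g. the single value to report back to the PC
    # scale temperatures to 0-255 so the JPEG encoder gets a valid uint8 image
    img = cv2.normalize(thermal, None, 0, 255, cv2.NORM_MINMAX).astype(np.uint8)
    img = cv2.resize(img, (320, 240), interpolation=cv2.INTER_CUBIC)  # optional upscale
    result, frame = cv2.imencode('.jpg', img, encode_param)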
Do you manage to read the thermal frames on the Raspberry Pi itself? I took a similar approach, though I am not using a thermal camera. If your problem is that you cannot transfer images from the Raspberry Pi to your computer, the code below may help.
Server code on the Raspberry Pi:
#!/usr/bin/env python3
import os
import datetime
import numpy as np
import cv2
import sys
import socket
import select
import queue
import pickle
import struct
import time
from threading import Thread

class WebcamVideoStream:
    def __init__(self, src=0):
        self.stream = cv2.VideoCapture(src)
        cv2.VideoWriter_fourcc('M','J','P','G')
        self.stream.set(cv2.CAP_PROP_BUFFERSIZE, 1)
        self.stream.set(5, 60)
        self.stream.set(3, 640)
        self.stream.set(4, 480)
        (self.grabbed, self.frame) = self.stream.read()
        self.stopped = False

    def start(self):
        Thread(target=self.update, args=()).start()
        return self

    def update(self):
        while True:
            if self.stopped:
                return
            (self.grabbed, self.frame) = self.stream.read()
            time.sleep(0.1)

    def read(self):
        img = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY)
        data = pickle.dumps(img)
        return data

    def stop(self):
        self.stopped = True

def commandParser(cmd, stream):
    reply = ""
    if cmd == "getimage":
        reply = stream.read()
        time.sleep(0.1)
    else:
        reply = '/n'.encode()
    return reply

if __name__ == '__main__':
    camera_idx = 0
    for i in range(3):
        stream = cv2.VideoCapture(i)
        test, frame = stream.read()
        stream.release()
        if test == True:
            camera_idx = i
            break

    #stream = cv2.VideoCapture(camera_idx)
    vs = WebcamVideoStream(src=camera_idx).start()

    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    port = 8080
    server.bind(('192.168.128.14', port))
    server.listen(5)

    inputs = [server]
    outputs = []
    message_queues = {}
    cmd = ""

    while inputs:
        readable, writable, exceptional = select.select(inputs, outputs, inputs, 1)
        for s in readable:
            if s is server:
                connection, client_address = s.accept()
                inputs.append(connection)
                message_queues[connection] = queue.Queue(1024)
            else:
                data = s.recv(4096)
                if data:
                    cmd = data.decode()
                    message_queues[s].put(commandParser(data.decode(), vs))
                    if s not in outputs:
                        outputs.append(s)
                else:
                    if s in outputs:
                        outputs.remove(s)
                    inputs.remove(s)
                    s.close()
                    del message_queues[s]

        for s in writable:
            try:
                next_msg = message_queues[s].get_nowait()
            except queue.Empty:
                outputs.remove(s)
            else:
                if cmd == "getimage":
                    size = len(next_msg)
                    s.sendall(struct.pack(">L", size) + next_msg)
                else:
                    s.send("ABCDEFGHIJKLMNONOOO".encode())

        for s in exceptional:
            print('handling exceptional condition for', s.getpeername())
            inputs.remove(s)
            if s in outputs:
                outputs.remove(s)
            s.close()
            del message_queues[s]

    vs.stop()
Client code on the PC:
#!/usr/bin/env python3
import os
import datetime
import numpy as np
import cv2
import socket
import sys
import pickle
import struct
import zlib
import time

server_address = ('192.168.128.14', 8080)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#print('connecting to %s port %s' % server_address)
s.connect(server_address)
cv2.namedWindow('Streaming')

payload_size = struct.calcsize(">L")

while True:
    s.send("getimage".encode())
    data = b""
    while len(data) < payload_size:
        data += s.recv(4096)

    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack(">L", packed_msg_size)[0]

    while len(data) < msg_size:
        data += s.recv(4096)

    frame_data = data[:msg_size]
    data = data[msg_size:]

    frame = pickle.loads(frame_data, fix_imports=True, encoding="bytes")
    cv2.imshow('Streaming', frame)
    cv2.waitKey(1)
    #cv2.imwrite("test.tiff", frame)

s.close()

How to send and receive a cv2 image in the form of a numpy array

I'm at an intermediate level in Python and new to libraries like NumPy and OpenCV. I'm trying to make a video-calling app using sockets; this code is just an attempt. I've tried it the following way, but when the array is received, its size becomes 0 and its shape becomes null. Please help me with this, and I'm sorry for any mistakes I've made.
Any help will be appreciated.
Thank you, and have a nice day.
Server:-
import socket
import threading
from _thread import *
import cv2
import numpy as np

srvr = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_ip = socket.gethostbyname(socket.gethostname())
port = 9999
client = []

try:
    srvr.bind((server_ip, port))
except socket.error as e:
    print(e)

def connected_client(conn, addr):
    global client
    print("[SERVER]: CONNECTED WITH ", addr)
    check_msg = "Welcome Client"
    conn.send(str.encode(check_msg))
    while True:
        try:
            data = conn.recv(5000)
            if not data:
                print("[SERVER]: DISCONNECTED..")
                break
            for i in client:
                i.sendall(data)
        except:
            break
    conn.close()

srvr.listen(5)
while True:
    print("[SERVER]: STARTED...\n [SERVER]: ACCEPTING CONNECTIONS....")
    conn, addr = srvr.accept()
    start_new_thread(connected_client, (conn, addr))
    client.append(conn)
Client:-
import socket, cv2
import threading
from _thread import *
import numpy as np
import time
import base64

video = cv2.VideoCapture(0)

clt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
SERVER = socket.gethostbyname(socket.gethostname())
PORT = 9999
ADDR = (SERVER, PORT)
clt.connect(ADDR)
print(clt.recv(2048).decode())

# receiving data from server
def recieve():
    global clt
    while True:
        recv_frame = clt.recv(5000)
        nparr = np.frombuffer(recv_frame, np.uint8)
        img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        key = cv2.waitKey(1)
        if key == ord('a'):
            break
        print(img)
        # cv2.imshow("aman", recv_frame)
        # break
        # https://stackoverflow.com/questions/17967320/python-opencv-convert-image-to-byte-string

# new thread to continuously receive data
start_new_thread(recieve, ())

# loop for continuously sending data
while True:
    check, frame = video.read()
    str_frame = cv2.imencode('.jpg', frame)[1].tobytes()
    clt.sendall(str_frame)

video.release()
cv2.destroyAllWindows()
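For context on why the received array comes back empty: TCP is a byte stream, so clt.recv(5000) returns an arbitrary slice of the stream rather than exactly one encoded frame, and cv2.imdecode fails on a partial or merged JPEG. A common fix, shown here only as a sketch (my own addition, using the same length-prefix pattern as the other answers on this page), is to prefix every frame with its length and read exactly that many bytes on the other side:

    import struct

    def send_frame(sock, jpeg_bytes):
        # 4-byte big-endian length header followed by the JPEG payload
        sock.sendall(struct.pack(">L", len(jpeg_bytes)) + jpeg_bytes)

    def recv_exact(sock, n):
        # keep calling recv() until exactly n bytes have been collected
        buf = b""
        while len(buf) < n:
            chunk = sock.recv(n - len(buf))
            if not chunk:
                raise ConnectionError("socket closed")
            buf += chunk
        return buf

    def recv_frame(sock):
        header = recv_exact(sock, struct.calcsize(">L"))
        (length,) = struct.unpack(">L", header)
        return recv_exact(sock, length)  # the complete JPEG for one frame

The sending loop would then call send_frame(clt, str_frame), and the receive thread would pass recv_frame(clt) to np.frombuffer and cv2.imdecode.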

python video streaming using opencv and web socket

I'm trying to write a Python script that streams video to a browser over a socket. I'm using OpenCV on the client to send frames via a socket, and a server script receives them and serves them to the browser. A single image displays in the browser, but when streaming video nothing shows up. Python Flask works fine, but it has a few issues for my use case, so I planned to use a raw socket for the browser display.
The client code sends frames to the server using OpenCV and a socket:
import cv2
import numpy as np
import socket
import sys
import pickle
import struct  ### new code

cap = cv2.VideoCapture("test.avi")
clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
clientsocket.connect(('0.0.0.0', 8082))

while True:
    ret, frame = cap.read()
    data = pickle.dumps(frame)  ### new code
    clientsocket.sendall(struct.pack("L", len(data)) + data)
Server code that receives the frames and forwards them to the browser:
import socket  # for sockets handling
import time    # for time functions
import sys
import cv2
import pickle
import numpy as np
import struct

hostIP = '127.0.0.1'
SourcePort = 8082  # client socket
PlayerPort = 8081  # Internet Browser

def gen_headers():
    # determine response code
    h = ''
    h = 'HTTP/1.1 200 OK\n'
    # write further headers
    current_date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
    h += 'Date: ' + current_date + '\n'
    h += 'Content-Type: image/jpeg\n\n'
    return h

def start_server():
    socketFFMPEG = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # this is for easy starting/killing the app
    socketFFMPEG.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    print('Socket created')

    data = b""
    payload_size = struct.calcsize("L")

    try:
        socketFFMPEG.bind((hostIP, SourcePort))
        print('Socket bind complete')
    except socket.error as msg:
        print('Bind failed. Error : ' + str(sys.exc_info()))
        sys.exit()

    # Start listening on socketFFMPEG
    socketFFMPEG.listen(10)
    print('Socket now listening. Waiting for video source from client socket on port', SourcePort)

    conn, addr = socketFFMPEG.accept()
    ip, port = str(addr[0]), str(addr[1])
    print('Accepting connection from ' + ip + ':' + port)

    socketPlayer = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    socketPlayer.bind((hostIP, PlayerPort))
    socketPlayer.listen(1)  # listen for just one connection
    print('Waiting for Internet Browser')
    conn2, addr2 = socketPlayer.accept()
    #conn2.sendall(gen_headers().encode())

    while True:
        try:
            while len(data) < payload_size:
                data += conn.recv(4096)

            packed_msg_size = data[:payload_size]
            data = data[payload_size:]
            msg_size = struct.unpack("L", packed_msg_size)[0]

            while len(data) < msg_size:
                data += conn.recv(4096)

            frame_data = data[:msg_size]
            data = data[msg_size:]

            frame = pickle.loads(frame_data)

            # send data to internet browser
            print(frame)
            ret, frame = cv2.imencode('.jpg', frame)
            frames = frame.tobytes()
            conn2.sendall(gen_headers().encode() + frames)
        except socket.error:
            print('Error data :' + str(frame))
            print('send Error : ' + str(sys.exc_info()))
            conn2.close()
            sys.exit()

    socketFFMPEG.close()

start_server()
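One likely reason a single image works but continuous video does not: the headers above describe a single image/jpeg response, so the browser renders the first JPEG and ignores or misinterprets the bytes that follow. Browsers can display a continuous stream of JPEGs if the response is sent as multipart/x-mixed-replace (MJPEG). A sketch of what gen_headers() and the per-frame send could look like in that case (my own suggestion, not part of the original post):

    def gen_headers():
        # one-time HTTP response header announcing an MJPEG stream
        return ('HTTP/1.1 200 OK\r\n'
                'Content-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n')

    def gen_frame_part(jpeg_bytes):
        # per-frame part: boundary, part headers, JPEG payload, trailing CRLF
        return (b'--frame\r\n'
                b'Content-Type: image/jpeg\r\n'
                b'Content-Length: ' + str(len(jpeg_bytes)).encode() + b'\r\n\r\n'
                + jpeg_bytes + b'\r\n')

    # send gen_headers().encode() once after accepting the browser connection,
    # then inside the loop send one part per decoded frame:
    #   conn2.sendall(gen_frame_part(frames))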
Server.py
# This is server code to send video frames over UDP
import cv2, imutils, socket
import numpy as np
import time
import base64

BUFF_SIZE = 65536
server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, BUFF_SIZE)
host_name = socket.gethostname()
host_ip = '192.168.1.102'  # socket.gethostbyname(host_name)
print(host_ip)
port = 9999
socket_address = (host_ip, port)
server_socket.bind(socket_address)
print('Listening at:', socket_address)

vid = cv2.VideoCapture(0)  # replace 'rocket.mp4' with 0 for webcam
fps, st, frames_to_count, cnt = (0, 0, 20, 0)

while True:
    msg, client_addr = server_socket.recvfrom(BUFF_SIZE)
    print('GOT connection from ', client_addr)
    WIDTH = 400
    while vid.isOpened():
        _, frame = vid.read()
        frame = imutils.resize(frame, width=WIDTH)
        encoded, buffer = cv2.imencode('.jpg', frame, [cv2.IMWRITE_JPEG_QUALITY, 80])
        message = base64.b64encode(buffer)
        server_socket.sendto(message, client_addr)
        frame = cv2.putText(frame, 'FPS: ' + str(fps), (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
        cv2.imshow('TRANSMITTING VIDEO', frame)
        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            server_socket.close()
            break
        if cnt == frames_to_count:
            try:
                fps = round(frames_to_count / (time.time() - st))
                st = time.time()
                cnt = 0
            except:
                pass
        cnt += 1
Client.py
# This is client code to receive video frames over UDP
import cv2, imutils, socket
import numpy as np
import time
import base64

BUFF_SIZE = 65536
client_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, BUFF_SIZE)
host_name = socket.gethostname()
host_ip = '192.168.1.102'  # socket.gethostbyname(host_name)
print(host_ip)
port = 9999
message = b'Hello'

client_socket.sendto(message, (host_ip, port))
fps, st, frames_to_count, cnt = (0, 0, 20, 0)

while True:
    packet, _ = client_socket.recvfrom(BUFF_SIZE)
    data = base64.b64decode(packet, ' /')
    npdata = np.frombuffer(data, dtype=np.uint8)  # np.fromstring is deprecated for binary data
    frame = cv2.imdecode(npdata, 1)
    frame = cv2.putText(frame, 'FPS: ' + str(fps), (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
    cv2.imshow("RECEIVING VIDEO", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        client_socket.close()
        break
    if cnt == frames_to_count:
        try:
            fps = round(frames_to_count / (time.time() - st))
            st = time.time()
            cnt = 0
        except:
            pass
    cnt += 1
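A brief note on this UDP approach (my own observation, not part of the original answer): each base64-encoded frame must fit into a single datagram that the client can read with one recvfrom(BUFF_SIZE) call, which is presumably why the server resizes to width 400 and uses JPEG quality 80. A small guard in the server loop makes that limit explicit:

    MAX_DGRAM = 65507  # maximum UDP payload size (65535 minus IP and UDP headers)
    message = base64.b64encode(buffer)
    # skip frames whose encoded size would not fit in one datagram
    if len(message) <= MAX_DGRAM:
        server_socket.sendto(message, client_addr)
    else:
        print('frame too large for a single datagram:', len(message), 'bytes')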

OpenCV live stream video over socket in Python 3

I am trying to create a simple application to send live video over a socket in Python 3 with OpenCV. I am new to OpenCV and socket programming, so if you can provide a detailed answer I will be very grateful. Thank you.
Here is sender.py
import socket
import time
import cv2

capture = cv2.VideoCapture(0)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('192.168.1.10', 50505))

while True:
    ret, frame = capture.read()
    data = cv2.imencode('.jpg', frame)[1].tostring()
    sock.sendall(data)
    time.sleep(2)
Here is receiver.py
import socket
import cv2
import numpy as np
import time

HOST = '192.168.1.10'
PORT = 50505

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')

s.bind((HOST, PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')

conn, addr = s.accept()

while True:
    data = conn.recv(8192)
    nparr = np.fromstring(data, np.uint8)
    frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    cv2.imshow('frame', frame)
    time.sleep(2)
and this is the error
receiver.py", line 29, in <module>
cv2.imshow('frame', frame)
cv2.error: D:\Build\OpenCV\opencv-3.4.0\modules\highgui\src\window.cpp:339:
error: (-215) size.width>0 && size.height>0 in function cv::imshow
I'm the author of the VidGear video-processing Python library, which also provides the NetGear API, designed exclusively to transfer video frames synchronously between interconnected systems over the network in real time. You can try it as follows:
A. Server End (bare-minimum example)
Open your favorite terminal and execute the following Python code:
Note: You can end streaming at any time on both the server and client side by pressing [Ctrl+C] on the server end.
# import libraries
from vidgear.gears import VideoGear
from vidgear.gears import NetGear

stream = VideoGear(source='test.mp4').start()  # Open any video stream
server = NetGear()  # Define netgear server with default settings

# infinite loop until [Ctrl+C] is pressed
while True:
    try:
        # read frames
        frame = stream.read()
        # check if frame is None
        if frame is None:
            # if True break the infinite loop
            break
        # do something with frame here
        # send frame to client
        server.send(frame)
    except KeyboardInterrupt:
        # break the infinite loop
        break

# safely close video stream
stream.stop()
# safely close server
server.close()
B. Client End (bare-minimum example)
Then open another terminal on the same system, execute the following Python code, and watch the output:
# import libraries
from vidgear.gears import NetGear
import cv2

# define netgear client with `receive_mode = True` and default settings
client = NetGear(receive_mode=True)

# infinite loop
while True:
    # receive frames from network
    frame = client.recv()

    # check if frame is None
    if frame is None:
        # if True break the infinite loop
        break

    # do something with frame here

    # Show output window
    cv2.imshow("Output Frame", frame)

    key = cv2.waitKey(1) & 0xFF
    # check for 'q' key-press
    if key == ord("q"):
        # if 'q' key-pressed break out
        break

# close output window
cv2.destroyAllWindows()

# safely close client
client.close()
As of now, NetGear supports two ZeroMQ messaging patterns, i.e. zmq.PAIR and zmq.REQ/zmq.REP, and the supported protocols are 'tcp' and 'ipc'.
More advanced usage can be found here: https://abhitronix.github.io/vidgear/latest/gears/netgear/overview/
It is because you are receiving only a small amount of data, so the image is not complete. 8192 bytes is not enough 99.99% of the time, because nearly every encoded image is larger than 8 KB. You'll need to grab ALL of the data sent by the sender in order to convert it to an image.
You can take a look at my code on GitHub and change it according to your needs.
Long story short, the easy option is to first send the number of bytes to the client, and then send the image itself. In the client code, after receiving the length of the image, loop until all bytes have been received.
for example:
...
img_len = 175428  # image length received from the sender
e = 0
data = b''  # bytes, since sock.recv() returns bytes in Python 3
while e < img_len:
    d = sock.recv(1024)
    e += len(d)
    data += d

nparr = np.frombuffer(data, np.uint8)  # np.fromstring is deprecated for binary data
frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
cv2.imshow('frame', frame)
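On the sending side, a matching sketch (my own addition, reusing the JPEG-encoded data variable from sender.py above) would announce the length before the image, for example with a fixed-width header:

    # sender side: announce the payload length first, then send the JPEG bytes
    data = cv2.imencode('.jpg', frame)[1].tobytes()
    sock.sendall(str(len(data)).ljust(16).encode())  # fixed-width 16-byte length header
    sock.sendall(data)

The receiver would then read that fixed-width header first, e.g. img_len = int(conn.recv(16)), before entering the loop above.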
Late answer, but for those looking for live video transmission and reception over a socket:
server.py
import socket, cv2, pickle, struct

# Socket Create
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:', host_ip)
port = 9999
socket_address = (host_ip, port)

# Socket Bind
server_socket.bind(socket_address)

# Socket Listen
server_socket.listen(5)
print("LISTENING AT:", socket_address)

# Socket Accept
while True:
    client_socket, addr = server_socket.accept()
    print('GOT CONNECTION FROM:', addr)
    if client_socket:
        vid = cv2.VideoCapture(0)

        while vid.isOpened():
            img, frame = vid.read()
            a = pickle.dumps(frame)
            message = struct.pack("Q", len(a)) + a
            client_socket.sendall(message)

            cv2.imshow('TRANSMITTING VIDEO', frame)
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q'):
                client_socket.close()
client.py
import socket, cv2, pickle, struct

# create socket
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_ip = '192.168.1.20'  # paste your server ip address here
port = 9999
client_socket.connect((host_ip, port))  # a tuple
data = b""
payload_size = struct.calcsize("Q")

while True:
    while len(data) < payload_size:
        packet = client_socket.recv(4*1024)  # 4K
        if not packet:
            break
        data += packet

    packed_msg_size = data[:payload_size]
    data = data[payload_size:]
    msg_size = struct.unpack("Q", packed_msg_size)[0]

    while len(data) < msg_size:
        data += client_socket.recv(4*1024)

    frame_data = data[:msg_size]
    data = data[msg_size:]
    frame = pickle.loads(frame_data)

    cv2.imshow("RECEIVING VIDEO", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break

client_socket.close()
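Since this length-prefixed pickle protocol carries an arbitrary Python object, it also speaks to the metadata question at the top of this page: instead of pickling the bare frame, you can pickle a dictionary that bundles the frame with its metadata. A sketch of that variation (my own addition, reusing the names from server.py and client.py above):

    import time

    # server side: bundle the frame with its metadata in one pickled dict
    packet = {
        "frame": frame,
        "frame_number": frame_number,  # hypothetical counter maintained by the sender loop
        "timestamp": time.time(),
        "video_name": "input2.mp4",
    }
    a = pickle.dumps(packet)
    client_socket.sendall(struct.pack("Q", len(a)) + a)

    # client side: unpack the dict after pickle.loads(frame_data)
    packet = pickle.loads(frame_data)
    frame = packet["frame"]
    print(packet["frame_number"], packet["timestamp"], packet["video_name"])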
