I am using sockets to send video feed bytes from server to client. The video feed is being captured using openCV. But the method I am using right now works for a couple of seconds and stops with error OSError: [WinError 10040] A message sent on a datagram socket was larger than the internal message buffer or some other network limit, or the buffer used to receive a datagram into was smaller than the datagram itself Where did I go wrong and how can I fix it? Thanks in advance.
HOST
import cv2
import socket
import pickle
s_stream = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s_stream.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 10000000)
streamIp = "192.168.3.5"
streamPort = 8787
camera = cv2.VideoCapture(0)
while True:
ret, img = camera.read()
ret, buffer = cv2.imencode(
'.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), 30])
x_as_bytes = pickle.dumps(buffer)
s_stream.sendto(x_as_bytes, (streamIp, streamPort))
CLIENT
import cv2, socket, pickle
s_stream=socket.socket(socket.AF_INET , socket.SOCK_DGRAM)
streamIp="192.168.3.5"
streamPort=8787
s_stream.bind((streamIp,streamPort))
while True:
x=s_stream.recvfrom(10000000)
clientip = x[1][0]
data=x[0]
data=pickle.loads(data)
print(data)
Related
I tried to stream a video over Kafka and displaying it in a simple HTTP server, built in Python. regardless the good streaming data, only the first timeframe (or first received timeframe) is served in the HTTP Server.
I have proved that the sent and received timeframes equal to the streamed video (by exporting out the sent and received time frames). my suspicion is that the HTTP Server doesn't automatically refresh or write the received timeframe every new messages are received.
KafkaProducer
import cv2
import io
from PIL import Image
from kafka import KafkaProducer
def recordedVid(video_file):
producer = KafkaProducer(
bootstrap_servers='localhost:9092'
)
cap = cv2.VideoCapture(video_file)
while(cap.isOpened()):
ret, frame = cap.read()
ret, buffer = cv2.imencode('.jpg', frame)
toStream = buffer.tobytes()
producer.send('Video', toStream)
print('sent')
time.sleep(0.1)
cap.release()
print('done')
recordedVid('sample.mp4')
KafkaConsumer
import cv2
import socket
from http.server import HTTPServer, BaseHTTPRequestHandler
from kafka import KafkaConsumer
from PIL import Image
import numpy as np
import io
from ensurepip import bootstrap
def extract_IP():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
HOST = s.getsockname()[0]
return HOST
def msg_process(msg) :
frame_bytes = msg.value
frame_arr = np.frombuffer(frame_bytes, np.uint8)
frame = cv2.imdecode(frame_arr, cv2.IMREAD_COLOR)
img = Image.fromarray(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
toLoad = io.BytesIO()
img.save(toLoad, format='JPEG')
return toLoad.getvalue()
consumer = KafkaConsumer(
"Video",
bootstrap_servers = 'localhost:9092'
)
class Display(BaseHTTPRequestHandler):
def do_GET(self):
for msg in consumer:
frame = msg_process(msg)
self.send_response(200)
self.send_header("Content-type", "image/jpeg")
self.end_headers()
self.wfile.write(frame)
HOST = extract_IP()
PORT = 9999
server = HTTPServer((HOST, PORT), Display)
print("server is now running in ", HOST," and Port ", PORT)
server.serve_forever()
server.server_close()
Did I miss something or is there any workaround for my problem?
I am wondering how i can send an image through a python socket.
I have already tried it with pickle and sending the byte string piece by piece. However that takes forever.
Here is what I have tried.
my server code:
import socket
from PIL import Image
import pickle
host=""
port=80
server=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("", port))
server.listen()
path=("path")
image=Image.open(path)
def acc():
while True:
conn, addr=server.accept()
print("connected to %s" %(conn))
conn.send(pickle.dumps(image))
acc()
my client code:
import socket
import pickle
host="192.168.1.11"
port=80
c=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
c.connect((host, port))
while True:
while True:
data=[]
packet=c.recv(100000)
if not packet: break
data.append(packet)
data_arr=b"".join(data)
print(pickle.loads(data_arr))
If the answer is not with pickle or PIL it is fine. I just need a way how this works. I'm looking forward for answers!
import socket, time
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("10.0.0.9", 2000))
server.listen()
def acc(image_Path):
with open(image_Path, "rb") as image:
data = image.read() # Read the bytes from the path
image.close()
while True:
conn, addr = server.accept()
print("connected to %s" %(conn))
conn.sendall(data) # Send the bytes
acc("The path of the image to transform")
import socket
import pickle
host = "10.0.0.9"
port = 2000
c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
c.connect((host, port))
while True:
data = c.recv(100000000) # We don't know the size of the image, so 100000000 just in case
with open("The path of the location of the image that recived", "wb") as newImage:
newImage.write(data) # Write the bytes in new path to create the image
newImage.close()
print("Got the image")
The answer is neither PIL nor Pickle!
If you have a 31kB JPEG of a 1920x1080 image and you open it with PIL, it will get JPEG-decompressed and expanded out into a 1920x1080x3 RGB image in memory - which will require a minimum of 6,220,800 bytes, i.e. you have increased its size by 200 times. Moral of the story is to send the (compressed) JPEG itself.
If you then pickle that 6MB monster, it will a) take longer and b) get bigger and it is entirely unnecessary because you can send binary data down a socket anyway.
The easiest way is to:
simply read() the JPEG/PNG file using Python read() rather than PIL Image.read()
do not pickle it
send a 4-byte header in network order (see htonl()) with the image size before your image and, on the receiving end, read 4 bytes and unpack them, then read the correct number of bytes in the image
put the received bytes into a BytesIO structure and then use PIL Image.open(BYTESIO_THING)
I'm trying to get frames from client, send it to server and from there write it into a video. But I keep failing in sending part, getting TypeError: Expected Ptr<cv::UMat> for argument '%s' error in out.write(frame).
I've also tried using pickle.dumps(frame) and then loading it in server side but it keeps getting truncated.
Server:
import numpy as np
import cv2, socket
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter("output.avi", fourcc, 19.0, (1366, 768))
s = socket.socket()
host = socket.gethostname()
port = 8080
s.bind((host,port))
s.listen(1)
print(host)
print("Waiting for any incoming connections ... ")
conn, addr = s.accept()
print(addr, "Has connected to the server")
while True:
frame = conn.recv(1024)
# write frame to video writer
out.write(frame)
if cv2.waitKey(1) == 27:
break
out.release()
cv2.destroyAllWindows()
Client:
import numpy as np
import cv2, socket
from PIL import ImageGrab
s = socket.socket()
host = input(str("Please enter the host address of the sender : "))
port = 8080
s.connect((host,port))
print("Connected ... ")
while True:
img = ImageGrab.grab()
img_np = np.array(img)
frame = img_np
s.send(frame)
Apparently in server, frame becomes <class 'bytes'>. So, I'm trying to find any way to fix this, including somehow converting bytes back into ndarray, or finding any other workaround.
Thanks.
Lets separate your question into two parts:
How to send data over a socket?
You are using a socket with 1024 bytes buffer which means that in every iteration you get 1024 bytes data at maximum.
What you should do when working in low level networking, is to put a unique end identifier token in the end of the frame and iterate in the server side with .recv() until you reached it. Another option is to send the length of your message and count the received bytes. This way, you know when you have a complete frame, then you can break the while loop, convert it to numpy array and .write() it.
How to pass numpy array over network?
You can pickle it and transfer the bytes into a io.BytesIO stream. Then load the stream with np.load() function.
You can also serialize the frame pixels as array of your pixel type, read them from the socket into io.BytesIO, then read them into numpy with np.fromfile(..., dtype= ...)
I'm trying to build a desktop streaming app. It consists of a server and a client for now. I learned that I should use the library pickle in order to serialize/deserialize the data. However, when I run both the scripts, I get the error "Pickle data was truncated" from the client side. Could you help me to solve this? I tried the solution the following link, whose OP apparently was trying to do the similar think but it didn't work.
python 3.6 socket pickle data was truncated
Server
import numpy as np
import cv2
from PIL import ImageGrab
import socket
import pickle
HOST = "0.0.0.0"
SOCKET = 5000
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST,SOCKET))
while True:
s.listen(5)
client, addres = s.accept()
print(addres, " has connected")
img = ImageGrab.grab()
img_np = np.array(img)
img_np_serial = pickle.dumps(img_np)
client.send(img_np_serial)
if cv2.waitKey(1) == 27:
break
cv2.destroyAllWindows()
Client
import socket
import pickle
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((socket.gethostbyname(socket.gethostname()),5000))
data = b""
while True:
packet = s.recv(4096)
if not packet: break
data += packet
data_deserial = pickle.loads(data)
print((data_deserial))
I'm having a idea of using TCP socket to send array object captured by webcam to a client and reconstruct the image in another program.
Server Side:
import socket
import numpy as np
import cv2
UDP_IP = '192.168.1.3'
UDP_PORT = 8081
cap = cv2.VideoCapture(0)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((UDP_IP,UDP_PORT))
sock.listen(1)
conn,addr=sock.accept()
print(addr)
while(True):
ret, frame = cap.read()
cv2.imshow('streamer',frame)
conn.send(frame.toBytes)
print(frame)
Client side:
import socket
import numpy
import time
import cv2
UDP_IP="192.168.1.3"
UDP_PORT = 8081
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((UDP_IP,UDP_PORT))
while True:
data = sock.recv(480*640*3)
print(data)
my issue is in server side the array is showing up correctly in the console. But in the client side the console is filled with Junk characters.
Why is that.? How should i reconstruct the array same as the server side?