Python asyncio - heartbeat() method not writing to stream

I am trying to create a proof of concept with Python 3 asyncio, implementing a client that periodically sends heartbeats to a server in order to keep the connection alive.
Note that the server is simply an echo server and doesn't close the connection. But it is important that the client is able to send a heartbeat periodically.
Here is the current implementation:
stream_client.py
import asyncio

class StreamClient:
    def __init__(self, heartbeat_int, loop=None):
        self.heartbeat_int = heartbeat_int
        if loop is not None:
            self.loop = loop
        else:
            self.loop = asyncio.get_event_loop()

    def start(self):
        """ start manages the event loop, but it is not a coroutine """
        self.loop.run_until_complete(self.get_client())
        self.loop.create_task(self.start_timed_session())
        msg = self.loop.run_until_complete(self.logon('hello'))
        if msg == 'hello':
            try:
                self.loop.run_forever()
            except KeyboardInterrupt:
                print('Closing connection with server...')
                self.writer.close()
                self.loop.close()
        else:
            print('Logon unsuccessful, closing connection with server...')
            self.writer.close()
            self.loop.close()

    @asyncio.coroutine
    def get_client(self):
        self.reader, self.writer = yield from asyncio.open_connection(
            '127.0.0.1',
            9871,
            loop=self.loop
        )
        print('Connection established at "localhost:9871"')

    @asyncio.coroutine
    def timed_session(self):
        yield from asyncio.sleep(self.heartbeat_int)
        self.loop.create_task(self.start_timed_session())

    @asyncio.coroutine
    def start_timed_session(self):
        heartbeat_task = self.loop.create_task(self.timed_session())
        heartbeat_task.add_done_callback(self.heartbeat)

    @asyncio.coroutine
    def logon(self, msg):
        print('Sending message:', msg)
        self.writer.write(msg.encode())
        data = yield from self.reader.read(15)
        resp = data.decode()
        print('Data received:', resp)
        return resp

    def heartbeat(self, fut):
        """
        This is future's callback:
        1) Can't be a coroutine
        2) Takes a future as an argument
        """
        print('> Sending heartbeat...')
        self.writer.write('heartbeat'.encode())

# Start the client
client = StreamClient(5)
client.start()
stream_server.py
import asyncio

class StreamServer:
    def __init__(self, loop=None):
        if loop is not None:
            self.loop = loop
        else:
            self.loop = asyncio.get_event_loop()

    @asyncio.coroutine
    def server_handler(self, reader, writer):
        data = yield from reader.read(15)
        msg = data.decode()
        if data is not 'bye':
            print('Received data: "{}" from {}'.format(msg, writer.get_extra_info('peername')))
            print('Echoing the message...')
            writer.write(data)
            yield from writer.drain()
        else:
            print('Received data: "{}" from {}'.format(
                data,
                writer.get_extra_info('peername')
                )
            )
            print('Closing the connection...')
            writer.close()

loop = asyncio.get_event_loop()
stream_server = StreamServer(loop)
coro_server = asyncio.start_server(
    stream_server.server_handler,
    '127.0.0.1',
    9871,
    loop=stream_server.loop
)
server = loop.run_until_complete(coro_server)
print('Listening on:', server.sockets[0].getsockname())
try:
    loop.run_forever()
except KeyboardInterrupt:
    pass
print('Closing server')
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()
Question:
In the heartbeat() method, the line self.writer.write('heartbeat'.encode()) never seems to be executed.
How can I get it to work?

The code is being executed on the client side; you just don't have any code on the server side to receive the message. server_handler is only called once per connection, not once per message. So, if you want it to be able to receive an unlimited number of heartbeats from a given client, you need to set up a loop inside server_handler to receive them:
@asyncio.coroutine
def server_handler(self, reader, writer):
    while True:  # Keep receiving data from client.
        data = yield from reader.read(15)
        msg = data.decode()
        if data is not 'bye':
            print('Received data: "{}" from {}'.format(msg, writer.get_extra_info('peername')))
            print('Echoing the message...')
            writer.write(data)
            yield from writer.drain()
        else:
            print('Received data: "{}" from {}'.format(
                data,
                writer.get_extra_info('peername')
                )
            )
            print('Closing the connection...')
            writer.close()
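With the looping server_handler above in place, the heartbeats will show up on the server. Separately, for anyone on a newer Python, here is a minimal sketch of the same heartbeat idea using async/await syntax and a dedicated task instead of a chain of done-callbacks (the 5-second interval and 60-second lifetime are just placeholders, not part of the original code):

import asyncio

async def heartbeat(writer, interval=5):
    # Periodically write a heartbeat and flush the transport buffer.
    while True:
        await asyncio.sleep(interval)
        print('> Sending heartbeat...')
        writer.write(b'heartbeat')
        await writer.drain()

async def client():
    reader, writer = await asyncio.open_connection('127.0.0.1', 9871)
    hb = asyncio.create_task(heartbeat(writer))   # runs concurrently with the rest
    writer.write(b'hello')
    await writer.drain()
    print('Data received:', (await reader.read(15)).decode())
    try:
        await asyncio.sleep(60)                   # placeholder for real work
    finally:
        hb.cancel()
        writer.close()
        await writer.wait_closed()

asyncio.run(client())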

Related

Best way to handle 2 websocket connections in the same time

I am handling data from 2 websocket servers and I would like to know what the fastest way is to handle both connections at the same time, given that the first connection sends data every 0.1-10 ms.
What I am doing so far is:
import asyncio
import json
import websockets

async def run():
    async with websockets.connect("ws://localhost:8546/") as ws1:
        async with websockets.connect(uri="wss://api.blxrbdn.com/ws",
                                      extra_headers={"Authorization": "apikey"}) as ws2:
            sub1 = await ws1.send("subscription 1")
            sub2 = await ws2.send("subscription 2")
            while True:
                try:
                    msg1 = await ws1.recv()
                    msg1 = json.loads(msg1)
                    msg2 = await ws2.recv()
                    msg2 = json.loads(msg2)
                    # process msg1 & msg2
                except Exception as e:
                    print(e, flush=True)

asyncio.run(run())
As stated in the comments, try to handle each connection in its own coroutine. Here is a small example:
import asyncio
import websockets

async def worker(ws, msg, t):
    while True:
        sub = await ws.send(msg)
        print("Received from the server:", await ws.recv())
        await asyncio.sleep(t)

async def run():
    url1 = "ws://localhost:8765/"
    url2 = "ws://something_different:8765/"
    async with websockets.connect(url1) as ws1, websockets.connect(url2) as ws2:
        await asyncio.gather(worker(ws1, "sub1", 1), worker(ws2, "sub2", 2))

asyncio.run(run())
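If the two feeds need to be correlated rather than handled fully independently, one option is to have each worker push into a shared asyncio.Queue and do all processing in a single consumer. This is only a sketch; the URLs and subscription messages are placeholders:

import asyncio
import json
import websockets

async def worker(url, sub_msg, queue):
    # Each connection gets its own receive loop and feeds the shared queue.
    async with websockets.connect(url) as ws:
        await ws.send(sub_msg)
        async for raw in ws:
            await queue.put((url, json.loads(raw)))

async def consumer(queue):
    while True:
        source, msg = await queue.get()
        print(source, msg)          # process msg1 & msg2 here
        queue.task_done()

async def run():
    queue = asyncio.Queue()
    await asyncio.gather(
        worker("ws://localhost:8546/", "subscription 1", queue),
        worker("wss://example.invalid/ws", "subscription 2", queue),
        consumer(queue),
    )

asyncio.run(run())

This keeps the fast feed from being blocked by a slow recv() on the other connection, since neither worker ever waits on the other.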

How can i make my TCP server respond to more than one request per connection?

So I have the code for my TCP server. It gets a string, echoes it and closes the connection. What I want is for the server to handle more than one request, respond to more than one client, and close the connection only if there is an error or the client disconnects. How can I do this?
import threading
import socket

def run_server(port):
    serv_sock = create_serv_sock(port)
    cid = 0
    while True:
        client_sock = accept_client_conn(serv_sock, cid)
        t = threading.Thread(target=serve_client,
                             args=(client_sock, cid))
        t.start()
        cid += 1

def serve_client(client_sock, cid):
    request = read_request(client_sock)
    if request is None:
        print(f'Client #{cid} unexpectedly disconnected')
    else:
        response = handle_request(request)
        write_response(client_sock, response, cid)

def create_serv_sock(serv_port):
    serv_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, proto=0)
    serv_sock.bind(('localhost', serv_port))
    serv_sock.listen()
    return serv_sock

def accept_client_conn(serv_sock, cid):
    client_sock, client_addr = serv_sock.accept()
    print(f'Client #{cid} connected '
          f'{client_addr[0]}:{client_addr[1]}')
    return client_sock

def read_request(client_sock, delimiter=b'!'):
    request = bytearray()
    while True:
        try:
            chunk = client_sock.recv(4)
            if not chunk:
                return None
            request += chunk
            if delimiter in request:
                return request
        except ConnectionResetError:
            return None
        except:
            raise

def handle_request(request):
    return request[::-1]

def write_response(client_sock, response, cid):
    client_sock.sendall(response)
    client_sock.close()
    print(f'Client #{cid} has been served')

if __name__ == '__main__':
    run_server(port=9090)
I tried to make 2 independent functions with threading.Thread like this:
def response(message, client_sock):
    client_sock.sendall(f'you said: {message}')

def handle(client_sock):
    while True:
        try:
            request = client_sock.recv(1024)
            response(request, client_sock)
        except:
            client_sock.close()
            print("client has disconected")
            break

def receive():
    while True:
        client_sock, client_addr = server_sock.accept()
        print(f"Connected with {client_addr[0]}:{client_addr[1]}")
        thread = threading.Thread(target=handle, args=(client_sock))
        thread.start()
But I always get a lot of thread errors
1 - Replace this function; I removed its while loop.
def read_request(client_sock, delimiter=b'!'):
    request = bytearray()
    try:
        chunk = client_sock.recv(4)
        if not chunk:
            return None
        request += chunk
        if delimiter in request:
            return request
    except ConnectionResetError:
        return None
    except:
        raise
2 - Replace this function; I moved the while loop here, so it keeps running, checking for new messages and processing them.
def serve_client(client_sock, cid):
    while True:
        request = read_request(client_sock)
        if request is None:
            print(f'Client #{cid} unexpectedly disconnected')
        else:
            response = handle_request(request)
            write_response(client_sock, response, cid)
3 - Replace this function; I removed the client_sock.close(), which was closing the socket as soon as one message had been received. Now the connection between client and server should stay open.
def write_response(client_sock, response, cid):
    client_sock.sendall(response)
    print(f'Client #{cid} has been served')

Processing queue with asyncio only fires after all items are placed in queue using asyncio in python

I am trying to write some code in Python that takes data from a generator (currently a simple counting loop, but it will be sensor data at some point) and places it in a queue. Once it is in the queue, I want to pull data off it and send it over a TCP connection. This seems like a great time to use asyncio, but I am doing something wrong.
Currently, the script processes all the numbers and does not return anything. Ideally I would want to make sure I always have something in the queue so it never empties, and send a set amount of data each time, say 5 numbers. How can I achieve this?
import asyncio
import random

class responder():
    def __init__(self, parent=None):
        super().__init__()

    async def produce(self, queue, n):
        for x in range(n):
            # produce an item
            print('producing {}/{}'.format(x, n))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            item = str(x)
            # put the item in the queue
            await queue.put(item)

    async def consume(self, queue):
        while True:
            # wait for an item from the producer
            item = await queue.get()
            # process the item
            print('consuming {}...'.format(item))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            # Notify the queue that the item has been processed
            queue.task_done()

    async def run(self, n):
        queue = asyncio.Queue()
        # schedule the consumer
        self.consumer = asyncio.ensure_future(self.consume(queue))
        # run the producer and wait for completion
        await self.produce(queue, n)
        # wait until the consumer has processed all items
        await queue.join()
        # the consumer is still awaiting for an item, cancel it
        self.consumer.cancel()

    async def handle_echo(self, reader, writer):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print("Received %r from %r" % (message, addr))
        if (message == 'START_RUN'):
            data = await self.run(10)
            print("Send: %i" % data)
            writer.write(data)
            await writer.drain()
        else:
            print("Send: %r" % message)
            writer.write(message)
            await writer.drain()
        print("Close the client socket")
        writer.close()

    def launch_server(self):
        self.loop = asyncio.get_event_loop()
        self.coro = asyncio.start_server(self.handle_echo, '127.0.0.1', 7780, loop=self.loop)
        self.server = self.loop.run_until_complete(self.coro)
        # Serve requests until Ctrl+C is pressed
        print('Serving on {}'.format(self.server.sockets[0].getsockname()))
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            # Close the server
            self.server.close()
            self.loop.run_until_complete(self.server.wait_closed())
            self.loop.close()

def main():
    server = responder()
    server.launch_server()

if __name__ == '__main__':
    main()
The code generates the number stream, but it runs through the entire list before moving on. Furthermore, I never get a value back.
My client code (which never gets anything back):
import asyncio

async def capture_stream(reader):
    while not reader.at_eof:
        data = await reader.read(100)
        print(f'{who} received {len(data)} bytes')

async def tcp_echo_client(message, loop):
    reader, writer = await asyncio.open_connection('127.0.0.1', 7780, loop=loop)
    print('Send: %r' % message)
    writer.write(message.encode())
    if (message == "START_RUN"):
        data = await reader.read(100)
        print('Received: %r' % data.decode())
    else:
        collect_data = asyncio.create_task(capture_stream)
        data = await collect_data
    print('Close the socket')
    writer.close()

message = 'START_RUN'
loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client(message, loop))
loop.close()
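The key issue on the server side is that handle_echo awaits self.run(10) to completion before writing anything, and run() returns None, so there is nothing to send. One way to stream results as they are produced is to have the consumer write directly to the client's StreamWriter. This is only a rough sketch (not the original class), with the batch size of 5 and n=20 as placeholders:

import asyncio
import random

async def produce(queue, n):
    for x in range(n):
        await asyncio.sleep(random.random())   # simulated sensor delay
        await queue.put(str(x))

async def consume(queue, writer, batch=5):
    items = []
    while True:
        items.append(await queue.get())
        queue.task_done()
        if len(items) == batch:
            # Send each batch of 5 items as soon as it is available,
            # instead of waiting for the producer to finish.
            writer.write((','.join(items) + '\n').encode())
            await writer.drain()
            items.clear()

async def handle_echo(reader, writer):
    message = (await reader.read(100)).decode()
    if message == 'START_RUN':
        queue = asyncio.Queue()
        consumer = asyncio.ensure_future(consume(queue, writer))
        await produce(queue, 20)
        await queue.join()
        consumer.cancel()
    writer.close()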

Socket in a async loop seems to return nothing

I wanted to get to know asyncio in Python better, so I decided to write a simple HTTP proxy server using only basic utilities such as sockets and asyncio.
Here is my code:
import asyncio
import socket

BUFFER = 4096

class ProxyServer:
    def __init__(self):
        self.listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.listener.bind(("192.168.1.6", 80))
        self.listener.listen()

    async def invoke(self, loop):
        try:
            while 1:
                conn, addr = self.listener.accept()
                await self.process_new_conn(conn, loop)
        except KeyboardInterrupt:
            loop.stop()
            print("Finishing...")

    async def process_new_conn(self, conn, loop):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sender:
            data = await loop.sock_recv(conn, BUFFER)
            await loop.sock_connect(sender, (host, port))
            sender.send(data)
            response = await loop.sock_recv(sender, BUFFER)
            conn.send(response)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    try:
        ps = ProxyServer()
        loop.run_until_complete(ps.invoke(loop))
    except OSError:
        print("80 port is being used.")
    finally:
        loop.close()
As far as I understand it, loop.run_until_complete adds my coro to the loop's queue and executes it at some point. When I call await self.process_new_conn(conn, loop), the loop adds this coro to its queue. My question is how to properly get it run from the loop's queue. It looks like the code in process_new_conn is executed, but the other socket receives nothing; yet when I try something like loop.run_until_complete(self.process_new_conn(conn, loop)) it works (with a runtime error, but still). It looks like there should be another approach, but I can't find it.
UPD: It turns out my tests were wrong. That's all. But here is my changed code with a small explanation, in case somebody needs it.
import asyncio
import socket
from select import select

BUFFER = 4096

class ProxyServer:
    def __init__(self):
        self.listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.listener.bind(("localhost", 80))
        self.listener.listen()
        self.loop = asyncio.get_event_loop()

    def invoke(self):
        try:
            self.loop.run_until_complete(self.accept_connection())
        except KeyboardInterrupt:
            self.loop.stop()
            self.loop.close()
            print("Finishing...")

    async def accept_connection(self):
        while 1:
            read, _, _ = select([self.listener], [], [], 0.1)
            if read:
                conn, addr = self.listener.accept()
                await self.process_new_conn(conn)

    async def process_new_conn(self, conn):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sender:
            data = await self.loop.sock_recv(conn, BUFFER)
            await self.loop.sock_connect(sender, (host, port))
            sender.send(data)
            response = await self.loop.sock_recv(sender, BUFFER)
            await self.loop.sock_sendall(conn, response)

if __name__ == "__main__":
    try:
        ps = ProxyServer()
        ps.invoke()
    except OSError:
        print("80 port is being used.")
As I understand it: using await we tell the loop that the coro should be put in a queue and may be interrupted (this lets the loop switch the current coro at the word await). This video helped me a lot: https://www.youtube.com/watch?v=m28fiN9y_r8&t=679s
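For what it's worth, the blocking accept()/select() polling can also be avoided entirely by putting the listening socket in non-blocking mode and using the event loop's socket helpers (loop.sock_accept, loop.sock_recv, loop.sock_sendall). A sketch follows; the upstream host and the listening port 8080 (instead of the privileged port 80) are placeholders:

import asyncio
import socket

BUFFER = 4096

async def proxy(loop, listener):
    listener.setblocking(False)
    while True:
        conn, addr = await loop.sock_accept(listener)   # non-blocking accept
        # Handle each client concurrently instead of one at a time.
        asyncio.ensure_future(relay(loop, conn))

async def relay(loop, conn):
    with conn, socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sender:
        sender.setblocking(False)
        data = await loop.sock_recv(conn, BUFFER)
        await loop.sock_connect(sender, ('example.com', 80))  # placeholder upstream
        await loop.sock_sendall(sender, data)
        response = await loop.sock_recv(sender, BUFFER)
        await loop.sock_sendall(conn, response)

if __name__ == "__main__":
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.bind(("localhost", 8080))
    listener.listen()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(proxy(loop, listener))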

cannot receive datagram correctly in python3 asyncio

I wrote a simple UDP client program, but it cannot receive datagrams correctly.
My code is below.
import asyncio

class EchoClientProtocol:
    def __init__(self, message, loop):
        self.message = message
        self.loop = loop
        self.transport = None

    def connection_made(self, transport):
        self.transport = transport
        print('Send:', self.message)
        self.transport.sendto(self.message.encode())

    def datagram_received(self, data, addr):
        print('Received:', data.decode())

async def sendChar(transport, msg):
    print('send: ', msg)
    transport.sendto(msg.encode())

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    message = 'Hello World!'
    connect = loop.create_datagram_endpoint(
        lambda: EchoClientProtocol(message, loop),
        remote_addr=('127.0.0.1', 9999)
    )
    transport, protocol = loop.run_until_complete(connect)
    while (True):
        try:
            ch = input()
        except KeyboardInterrupt:
            break
        loop.run_until_complete(sendChar(transport, ch))
    loop.run_forever()
    transport.close()
    loop.close()
And I ran the UDP echo server protocol program posted in the asyncio documentation
(https://docs.python.org/3/library/asyncio-protocol.html#udp-echo-server-protocol).
Running these programs, I expect the result to be like this:
Send: Hello World!
Received: Hello World!
1
send: 1
Received: 1
2
send: 2
Received: 2
But the result is this:
Send: Hello World!
1
send: 1
Received: Hello World!
2
send: 2
Received: 1
Why is the result shifted?
I searched around the asyncio module, but I cannot solve this problem.
It is because the while True loop does not give control back to the ioloop until the next send call - input() is blocking. Simply add some async sleep, so the ioloop can handle events and receive data:
while (True):
    try:
        loop.run_until_complete(asyncio.sleep(0.001))
        ch = input()
    except KeyboardInterrupt:
        break
    loop.run_until_complete(sendChar(transport, ch))
And since input() will block here as well, AFAIK it is best to move it to a separate thread:
import threading
import asyncio

class EchoClientProtocol:
    def __init__(self, message, loop):
        self.message = message
        self.loop = loop
        self.transport = None

    def connection_made(self, transport):
        self.transport = transport
        print('Send:', self.message)
        self.transport.sendto(self.message.encode())

    def datagram_received(self, data, addr):
        print('Received:', data.decode())

    def error_received(self, data, addr=None):
        raise data

async def sendChar(transport, msg):
    print('send: ', msg)
    transport.sendto(msg.encode())

user_input = [None]

# spawn a new thread to wait for input
def get_user_input(user_input_ref):
    while True:
        user_input_ref[0] = input()

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    message = 'Hello World!'
    connect = loop.create_datagram_endpoint(
        lambda: EchoClientProtocol(message, loop),
        remote_addr=('127.0.0.1', 9999)
    )
    transport, protocol = loop.run_until_complete(connect)
    input_thread = threading.Thread(target=get_user_input, args=(user_input,))
    input_thread.daemon = True
    input_thread.start()
    while (True):
        if user_input[0] is not None:
            loop.run_until_complete(sendChar(transport, user_input[0]))
            user_input[0] = None
        loop.run_until_complete(asyncio.sleep(1))
    loop.run_forever()
    transport.close()
    loop.close()
I have mixed your code with waiting for user input in a separate thread; feel free to refactor :)
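An alternative to the hand-rolled input thread and the one-second polling sleep is to push the blocking input() call into the loop's default executor, so the event loop keeps running while it waits for the user. A sketch, reusing the EchoClientProtocol from the code above (the wiring at the bottom is just one way to set it up):

async def main(loop, transport):
    while True:
        # input() runs in a worker thread; the loop stays free to
        # deliver datagram_received callbacks in the meantime.
        ch = await loop.run_in_executor(None, input)
        transport.sendto(ch.encode())

loop = asyncio.get_event_loop()
transport, protocol = loop.run_until_complete(
    loop.create_datagram_endpoint(
        lambda: EchoClientProtocol('Hello World!', loop),
        remote_addr=('127.0.0.1', 9999),
    )
)
try:
    loop.run_until_complete(main(loop, transport))
except KeyboardInterrupt:
    pass
finally:
    transport.close()
    loop.close()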
