Execute async request every N seconds while websocket keeps streaming (Python)

Using asyncio with a websocket and web requests, how does one continuously watch data from the websocket while also updating data from a web request every N seconds?
I've tried different versions of
async def websocket_stream():
    while True:
        get_data_from_stream()
        do_stuff_with_data()

async def web_requests():
    await asyncio.sleep(30)
    async with httpx.AsyncClient() as client:
        for n in list_of_things:
            html = client.get('www.webpage.com/stuff')
            do_other_stuff_with_data(html)

async def do_sometimes(timeout, stuff):
    while True:
        await asyncio.sleep(timeout)
        await stuff()

async def do_nonstop(stuff):
    while True:
        await stuff()

taskone = asyncio.create_task(do_nonstop(web_stream))
tasktwo = asyncio.create_task(do_sometimes(10, web_request()))
This obviously doesn't work and just runs the stream.
How do I set this up so that the web-request coroutine does one pass through list_of_things and then sleeps for N seconds, while the websocket stream keeps running?
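A pattern that seems to fit is to give each job its own long-running coroutine and run both with asyncio.gather(), so the periodic HTTP pass never blocks the stream reader. A minimal sketch (the URLs and the print() processing are placeholders, and the websockets/httpx usage mirrors the answers below):
import asyncio
import httpx
import websockets

list_of_things = ["stuff", "things"]        # placeholder data
STREAM_URL = "wss://example.com/stream"     # hypothetical websocket endpoint

async def websocket_stream():
    # keep receiving from the websocket forever
    async with websockets.connect(STREAM_URL) as ws:
        while True:
            data = await ws.recv()
            print("stream:", data)          # stand-in for do_stuff_with_data()

async def web_requests_every(interval):
    async with httpx.AsyncClient() as client:
        while True:
            for n in list_of_things:
                resp = await client.get(f"https://www.webpage.com/{n}")
                print("request:", len(resp.text))   # stand-in for do_other_stuff_with_data()
            # one full pass done; wait before the next round
            await asyncio.sleep(interval)

async def main():
    # both coroutines run concurrently; gather() keeps them alive together
    await asyncio.gather(websocket_stream(), web_requests_every(30))

asyncio.run(main())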

Related

Use the same websocket connection in multiple asynchronous loops (Python)

I am running two loops asynchronously and want both to have access to the same websocket connection. One function, periodic_fetch(), fetches some data periodically (every 60 seconds) and sends a message to the websocket if a condition is met. The other, retrieve_websocket(), receives messages from the websocket and performs some action if a condition is met. As of now, I connect to the websocket in both functions, but that means retrieve_websocket() will not receive the response to the websocket message sent by periodic_fetch(). How do I create one websocket connection and use the same one in both loops as they run asynchronously? My code:
# Imports
import asyncio
import websockets
from datetime import datetime

websocket_url = "wss://localhost:5000/"

# Simulate fetching some data
async def fetch_data():
    print("Fetching started")
    await asyncio.sleep(2)
    return {"data": 2}

# Receive and analyze websocket data
async def retrieve_websocket():
    async with websockets.connect(websocket_url) as ws:
        while True:
            msg = await ws.recv()
            print(msg)
            # Perform some task if condition is met

# Periodically fetch data and send messages to websocket
async def periodic_fetch():
    async with websockets.connect(websocket_url) as ws:
        while True:
            print(datetime.now())
            fetch_task = asyncio.create_task(fetch_data())
            wait_task = asyncio.create_task(asyncio.sleep(60))
            res = await fetch_task
            # Send message to websocket
            await ws.send("Websocket message")
            # Wait the remaining wait duration
            await wait_task

loop = asyncio.get_event_loop()
cors = asyncio.wait([periodic_fetch(), retrieve_websocket()])
loop.run_until_complete(cors)
The solution was to open the connection in a separate function and use asyncio.gather(), passing the websocket to the two coroutines as a parameter.
async def run_script():
    async with websockets.connect(websocket_url) as ws:
        await asyncio.gather(periodic_fetch(ws), retrieve_websocket(ws))

asyncio.run(run_script())
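For completeness, this implies the two loops take the shared connection as a parameter instead of opening their own. A minimal sketch of how the functions from the question change:
# Receive and analyze websocket data, using the shared connection
async def retrieve_websocket(ws):
    while True:
        msg = await ws.recv()
        print(msg)
        # Perform some task if condition is met

# Periodically fetch data and send messages over the shared connection
async def periodic_fetch(ws):
    while True:
        print(datetime.now())
        fetch_task = asyncio.create_task(fetch_data())
        wait_task = asyncio.create_task(asyncio.sleep(60))
        res = await fetch_task
        await ws.send("Websocket message")
        # Wait out the remainder of the 60-second period
        await wait_task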

Python websockets fast one way but 10x slower with response

I have a Sanic webserver running websockets; behind the scenes it's using the "websockets" library.
Server
from asyncio import sleep
from sanic import Sanic

app = Sanic("websocket test")

@app.websocket("/")
async def test(_, ws):
    while True:
        data = await ws.recv()
        data = await ws.send('Hi')

if __name__ == "__main__":
    app.run(host="127.0.0.1", port=8000)
Client
import asyncio
import websockets

async def hello():
    uri = "ws://localhost:8000"
    async with websockets.connect(uri) as websocket:
        while True:
            await websocket.send("Hi")
            await websocket.recv()

asyncio.get_event_loop().run_until_complete(hello())
When I remove ws.send('Hi') from the server and await websocket.recv() from the client, I can get 58,000 messages a second; once I start listening for a response it drops all the way down to 6,000 messages a second. I am just curious what is making this run 10x slower when the server responds.
I think the solution here would be to separate your send and recv into separate tasks so they can yield concurrently.
async def producer(send):
    while True:
        await send("...")
        await asyncio.sleep(1)

async def consumer(recv):
    while True:
        message = await recv()
        print(message)

async def test(request, ws):
    request.app.add_task(producer(ws.send))
    request.app.add_task(consumer(ws.recv))
Obviously, this is a very simple example, and at the very least you should use some sort of queue for your producer.
But when you break them into separate tasks, your recv is not blocked by send.
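A minimal sketch of what that queue-based producer could look like, assuming an asyncio.Queue that the rest of the application pushes outgoing messages onto (the names here are illustrative, not part of the original answer):
import asyncio

outgoing = asyncio.Queue()   # application code puts outbound messages here

async def producer(send):
    while True:
        # wait until something needs to be sent, then send it
        message = await outgoing.get()
        await send(message)
        outgoing.task_done()

async def consumer(recv):
    while True:
        message = await recv()
        print(message)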

Python Asyncio schedule tasks weekly

I am writing a Discord bot in Python, which already has the two overridden async methods on_ready and on_message to react to messages in a certain way.
Now I want to add a function which should be called once a week. I tried something with asyncio.sleep(), but I don't want to have to start the bot at that specific time and then sleep 604,800 seconds (one week) to repeat the function every week.
Here's what I got so far:
class MyClient(discord.Client):
    async def on_ready(self):
        # some initial stuff
        self.loop.create_task(self.routine())

    async def on_message(self, message):
        # reply to chat messages
        await message.channel.send("Reply")

    async def routine(self):
        while True:
            print("I'm on that routine boi")
            await asyncio.sleep(3)
This works so far: I can use the bot in Discord and it prints the output every 3 seconds.
But I'd prefer something like the schedule module, where I could use something like
scheduler.every().sunday.at("8:55").do(self.routine())
Any chance to do something similar in combination with asyncio?
The event loop provides mechanisms to schedule callback functions to be called at some point in the future, as shown in the Python docs:
class MyClient(discord.Client):
    async def on_ready(self):
        # some initial stuff
        self.routine()

    async def on_message(self, message):
        # reply to chat messages
        await message.channel.send("Reply")

    def routine(self):
        print("I'm on that routine boi")
        # schedule the next run one week (604800 seconds) from now
        self.loop.call_later(604800, self.routine)
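If the goal is specifically "every Sunday at 8:55" rather than "every 604,800 seconds from startup", one alternative sketch (not from the original answer; the helper name seconds_until is made up here) is to compute the delay until the next scheduled time and sleep for that long inside the routine loop:
import asyncio
from datetime import datetime, timedelta

def seconds_until(weekday, hour, minute):
    """Seconds from now until the next given weekday (Mon=0 .. Sun=6) at hour:minute."""
    now = datetime.now()
    target = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
    target += timedelta(days=(weekday - now.weekday()) % 7)
    if target <= now:                  # that slot already passed this week
        target += timedelta(days=7)
    return (target - now).total_seconds()

async def routine():
    # drop-in body for MyClient.routine() above
    while True:
        await asyncio.sleep(seconds_until(6, 8, 55))   # next Sunday, 08:55
        print("I'm on that routine boi")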

asyncio + aiohttp: overlapping IO with sleeping

When all coroutines are waiting, asyncio listens for events to wake them up again. A common example would be asyncio.sleep(), which registers a timed event. In practice an event is usually an IO socket ready for receiving or sending new data.
To get a better understanding of this behaviour, I set up a simple test: it sends an HTTP request to localhost and waits for the response. On localhost, I've set up a Flask server which waits for 1 second before responding. After sending the request, the client sleeps for 1 second, then it awaits the response. I would expect this to return in roughly a second, since both my program and the server should sleep in parallel. But it takes 2 seconds:
import aiohttp
import asyncio
from time import perf_counter

async def main():
    async with aiohttp.ClientSession() as session:
        # this http request will take 1 second to respond
        async with session.get("http://127.0.0.1:5000/") as response:
            # yield control for 1 second
            await asyncio.sleep(1)
            # wait for the http request to return
            text = await response.text()
            return text

loop = asyncio.get_event_loop()
start = perf_counter()
results = loop.run_until_complete(main())
stop = perf_counter()
print(f"took {stop-start} seconds")  # 2.01909
What is asyncio doing here? Why can't I overlap the waiting times?
I'm not interested in the specific scenario of HTTP requests; aiohttp is only used to construct an example. Which is probably a bit dangerous: this could be related to aiohttp and not asyncio at all.
Actually, I expect this to be the case (hence the question title about both asyncio and aiohttp).
My first intuition was that the request is maybe not sent before calling asyncio.sleep(). So I reordered things a bit:
# start coroutine
text = response.text()
# yield control for 1 second
await asyncio.sleep(1)
# wait for the http request to return
text = await text
But this still takes two seconds.
Ok, now to be really sure that the request was sent off before going to sleep, I added print("incoming") to the route on the server, before it goes to sleep. I also changed the length of sleeping time to 10 seconds on the client. The server prints incoming immediately after the client is run. The client takes 11 seconds in total.
@app.route('/')
def index():
    print("incoming")
    time.sleep(1)
    return 'done'
Since the HTTP request is made immediately, the server has definitely sent off an answer before the client wakes up from asyncio.sleep(). It seems to me that the socket carrying the HTTP response should be ready as soon as the client wakes up. But still, the total runtime is always the sum of the client and server waiting times.
Am I misusing asyncio somehow, or is this related to aiohttp after all?
The problem is that the one second spent on the server happens inside async with session.get("http://127.0.0.1:5000/") as response:.
The HTTP request finishes before you get this response object.
You can test it by:
...
async def main():
    async with aiohttp.ClientSession() as session:
        start = perf_counter()
        # this http request will take 1 second to respond
        async with session.get("http://127.0.0.1:5000/") as response:
            end = perf_counter()
            print(f"took {end-start} seconds to get response")
            # yield control for 1 second
            await asyncio.sleep(1)
            # wait for the http request to return
            text = await response.text()
            return text
...
And btw you can surely overlap this waiting time, as long as you have another running coroutine.
Your testing code has three awaits (two explicit and one hidden in async with) in series, so you don't get any parallel waiting. The code that tests the scenario you describe is something along the lines of:
async def download():
    async with aiohttp.ClientSession() as session:
        async with session.get("http://127.0.0.1:5000/") as response:
            text = await response.text()
            return text

async def main():
    loop = asyncio.get_event_loop()
    # have download start "in the background"
    dltask = loop.create_task(download())
    # now sleep
    await asyncio.sleep(1)
    # and now await the end of the download
    text = await dltask
Running this coroutine should take the expected time.
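As a usage note, driving this main() the same way as the original snippet should report roughly one second rather than two:
start = perf_counter()   # perf_counter imported as in the original snippet
asyncio.get_event_loop().run_until_complete(main())
stop = perf_counter()
print(f"took {stop-start} seconds")  # expected: roughly 1 second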

Listen to multiple sockets with websockets and asyncio

I am trying to create a script in Python that listens to multiple sockets using websockets and asyncio. The problem is that no matter what I do, it only listens to the first socket I call.
I think it's the infinite loop; what are my options to solve this? Using threads for each socket?
async def start_socket(self, event):
    payload = json.dumps(event)
    loop = asyncio.get_event_loop()
    self.tasks.append(loop.create_task(self.subscribe(event)))
    # this should not block the rest of the code
    await asyncio.gather(*tasks)

def test(self):
    # I want to be able to add coroutines at a different time
    self.start_socket(event1)
    # some code
    self.start_socket(event2)
This is what I did eventually; that way it's not blocking the main thread and all subscriptions work in parallel.
def subscribe(self, payload):
    ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE})
    ws.connect(url)
    ws.send(payload)
    while True:
        result = ws.recv()
        print("Received '%s'" % result)

def start_thread(self, loop):
    asyncio.set_event_loop(loop)
    loop.run_forever()

def start_socket(self, payload):
    worker_loop = asyncio.new_event_loop()
    worker = Thread(target=self.start_thread, args=(worker_loop,))
    worker.start()
    worker_loop.call_soon_threadsafe(self.subscribe, payload)

def listen(self):
    self.start_socket(payload1)
    # code
    self.start_socket(payload2)
    # code
    self.start_socket(payload3)
Your code appears incomplete, but what you've shown has two issues. One is that run_until_complete accepts a coroutine object (or other kind of future), not a coroutine function. So it should be:
# note parentheses after your_async_function()
asyncio.get_event_loop().run_until_complete(your_async_function())
the problem is that no matter what I do it only listens to the first socket I call. I think it's the infinite loop; what are my options to solve this? Using threads for each socket?
The infinite loop is not the problem; asyncio is designed to support such "infinite loops". The problem is that you are trying to do everything in one coroutine, whereas you should be creating one coroutine per websocket. That is not a problem, as coroutines are very lightweight.
For example (untested):
async def subscribe_all(self, payload):
    loop = asyncio.get_event_loop()
    # create a task for each URL
    tasks = []
    for url in url_list:
        tasks.append(loop.create_task(self.subscribe_one(url, payload)))
    # run all tasks in parallel
    await asyncio.gather(*tasks)

async def subscribe_one(self, url, payload):
    async with websockets.connect(url) as websocket:
        await websocket.send(payload)
        while True:
            msg = await websocket.recv()
            print(msg)
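As a usage sketch (the names client, payload, and url_list are assumed from the surrounding code, not defined here), this ties back to the first point about passing a coroutine object to run_until_complete:
# note the parentheses: subscribe_all(payload) creates the coroutine object
asyncio.get_event_loop().run_until_complete(client.subscribe_all(payload))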
One way to efficiently listen to multiple websocket connections from a websocket server is to keep a list of connected clients and essentially juggle multiple conversations in parallel.
E.g. a simple server that sends a random number to each connected client every few seconds:
import os
import asyncio
import websockets
import random

websocket_clients = set()

async def handle_socket_connection(websocket, path):
    """Handles the whole lifecycle of each client's websocket connection."""
    websocket_clients.add(websocket)
    print(f'New connection from: {websocket.remote_address} ({len(websocket_clients)} total)')
    try:
        # This loop will keep listening on the socket until it's closed.
        async for raw_message in websocket:
            print(f'Got: [{raw_message}] from socket [{id(websocket)}]')
    except websockets.exceptions.ConnectionClosedError as cce:
        pass
    finally:
        print(f'Disconnected from socket [{id(websocket)}]...')
        websocket_clients.remove(websocket)

async def broadcast_random_number(loop):
    """Keeps sending a random number to each connected websocket client."""
    while True:
        for c in websocket_clients:
            num = str(random.randint(10, 99))
            print(f'Sending [{num}] to socket [{id(c)}]')
            await c.send(num)
        await asyncio.sleep(2)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    try:
        socket_server = websockets.serve(handle_socket_connection, 'localhost', 6789)
        print(f'Started socket server: {socket_server} ...')
        loop.run_until_complete(socket_server)
        loop.run_until_complete(broadcast_random_number(loop))
        loop.run_forever()
    finally:
        loop.close()
        print(f"Successfully shutdown [{loop}].")
A simple client that connects to the server and listens for the numbers:
import asyncio
import random
import websockets

async def handle_message():
    uri = "ws://localhost:6789"
    async with websockets.connect(uri) as websocket:
        msg = 'Please send me a number...'
        print(f'Sending [{msg}] to [{websocket}]')
        await websocket.send(msg)
        while True:
            got_back = await websocket.recv()
            print(f"Got: {got_back}")

asyncio.get_event_loop().run_until_complete(handle_message())
Mixing threads and asyncio is more trouble than it's worth, and you still have code that blocks on the most wasteful steps, like network IO (avoiding that is the essential benefit of using asyncio).
You need to run each coroutine asynchronously in an event loop, await any blocking calls, and define every method that interacts with an awaitable as async.
See a working example: https://github.com/adnantium/websocket_client_server
