Django asynchronous views should be able to respond immediately while tasks run asynchronously in the background, but in practice the task does not continue running.
async def task_async():
    """Simulated background job: log start, pretend to work for 2 s, log completion."""
    print('task begin')
    await asyncio.sleep(2)
    print('task run success')
async def view_async(request):
    """Fire off task_async() without awaiting it, then respond immediately."""
    print('async begin')
    event_loop = asyncio.get_event_loop()
    # Schedule the task on the current loop; the view does not wait for it.
    event_loop.create_task(task_async())
    print('return')
    return HttpResponse("Non-blocking HTTP request")
I expect the task to continue running after the http response returns, but the result is:
async begin
return
task begin
Using uvicorn is OK, but running via manage.py is not.
Related
I have a question.
I wrote a simple code that mimics http request:
from asyncio import sleep, run


async def get():
    """Mimic an HTTP request: announce start, wait 3 seconds, announce finish."""
    print("Started get()")
    await sleep(3)
    print("Finished get()")


async def async_main():
    # Awaiting each call in turn runs the three "requests" strictly sequentially.
    await get()
    await get()
    await get()


if __name__ == "__main__":
    run(async_main())
I expected that the result should be like:
Started get()
Started get()
Started get()
(No output, just wait 3 seconds)
Finished get()
Finished get()
Finished get()
But the result was:
Started get()
(No output, just wait 3 seconds)
Finished get()
Started get()
(No output, just wait 3 seconds)
Finished get()
Started get()
(No output, just wait 3 seconds)
Finished get()
Why is this happening?
just run the task asynchronously like below
async def async_main():
    # gather() schedules all three coroutines at once, so their sleeps overlap.
    await asyncio.gather(get(), get(), get())


if __name__ == "__main__":
    # The bare run() imported earlier would work here too.
    asyncio.run(async_main())
You need to schedule the coroutines, either explicitly using asyncio.create_task() or implicitly using asyncio.gather():
import asyncio  # BUG FIX: needed for asyncio.create_task / asyncio.gather below
from asyncio import sleep, run


async def get():
    """Mimic an HTTP request: announce start, wait 3 seconds, announce finish."""
    print("Started get()")
    await sleep(3)
    print("Finished get()")


async def async_main():
    # Option 1: explicitly wrap each coroutine in a Task so all start immediately.
    tasks = [asyncio.create_task(get()),
             asyncio.create_task(get()),
             asyncio.create_task(get())]  # Explicit
    await asyncio.gather(*tasks)


async def async_main():  # Option 2 — shadows Option 1; keep only one in real code
    await asyncio.gather(get(), get(), get())  # Implicit
I need to be able to keep adding coroutines to the asyncio loop at runtime. I tried using create_task() thinking that this would do what I want, but it still needs to be awaited.
This is the code I had, not sure if there is a simple edit to make it work?
async def get_value_from_api():
    # Returns the client's awaitable without awaiting it; the caller's
    # `await` resolves it. ASYNC_CLIENT is set up in start() below.
    global ASYNC_CLIENT
    return ASYNC_CLIENT.get(api_address)


async def print_subs():
    # Fetch the current value and print it.
    count = await get_value_from_api()
    print(count)


async def save_subs_loop():
    # Intended: spawn a new print_subs() task every 0.1 s.
    # NOTE(review): time.sleep() is a blocking call — control never returns
    # to the event loop, so the created tasks never run (the bug discussed
    # in the answers below).
    while True:
        asyncio.create_task(print_subs())
        time.sleep(0.1)


async def start():
    # Keep one AsyncClient open for the lifetime of the loop.
    global ASYNC_CLIENT
    async with httpx.AsyncClient() as ASYNC_CLIENT:
        await save_subs_loop()


asyncio.run(start())
I once created a similar pattern when I was mixing trio and kivy, as a demonstration of running multiple coroutines asynchronously.
It uses a trio.MemoryChannel, which is roughly equivalent to asyncio.Queue; I'll just refer to it as a queue here.
Main idea is:
Wrap each task with class, which has run function.
Make class object's own async method to put object itself into queue when execution is done.
Create a global task-spawning loop to wait for the object in queue and schedule execution/create task for the object.
import asyncio
import traceback
import httpx
async def task_1(client: httpx.AsyncClient):
    """Fetch the root endpoint, print the raw body, then pause briefly."""
    response = await client.get("http://127.0.0.1:5000/")
    print(response.read())
    await asyncio.sleep(0.1)  # rate-limit: without this would be IP ban
async def task_2(client: httpx.AsyncClient):
    """Fetch /meow/, print the raw body, then pause half a second."""
    response = await client.get("http://127.0.0.1:5000/meow/")
    print(response.read())
    await asyncio.sleep(0.5)
class CoroutineWrapper:
def __init__(self, queue: asyncio.Queue, coro_func, *param):
self.func = coro_func
self.param = param
self.queue = queue
async def run(self):
try:
await self.func(*self.param)
except Exception:
traceback.print_exc()
return
# put itself back into queue
await self.queue.put(self)
class KeepRunning:
    """Task-spawning loop: schedules every wrapper that appears in its queue."""

    def __init__(self):
        # Queue through which CoroutineWrapper objects (re)announce themselves.
        self.queue = asyncio.Queue()

    def add_task(self, coro, *param):
        """Wrap *coro* with its parameters and enqueue it for execution."""
        self.queue.put_nowait(CoroutineWrapper(self.queue, coro, *param))

    async def task_processor(self):
        """Wait for wrappers and spawn a Task for each one's run() method."""
        task: CoroutineWrapper
        while task := await self.queue.get():
            asyncio.create_task(task.run())
async def main():
    runner = KeepRunning()
    async with httpx.AsyncClient() as client:
        # Seed the cycle once; each task re-queues itself after finishing.
        runner.add_task(task_1, client)
        runner.add_task(task_2, client)
        await runner.task_processor()


asyncio.run(main())
Server
import time

from flask import Flask

app = Flask(__name__)


# BUG FIX: the decorators were mangled into comments (`#app.route`) by the
# post's formatting; without `@` the routes are never registered.
@app.route("/")
def hello():
    """Return the current UNIX timestamp as text."""
    return str(time.time())


@app.route("/meow/")
def meow():
    """Return a fixed greeting."""
    return "meow"


app.run()
Output:
b'meow'
b'1639920445.965701'
b'1639920446.0767004'
b'1639920446.1887035'
b'1639920446.2986999'
b'1639920446.4067013'
b'meow'
b'1639920446.516704'
b'1639920446.6267014'
...
You can see tasks running repeatedly on their own pace.
Old answer
Seems like you only want to cycle fixed amount of tasks.
In that case just iterate list of coroutine with itertools.cycle
But this is no different from synchronous code, so let me know if what you need is asynchronous behavior.
import asyncio
import itertools

import httpx


async def main_task(client: httpx.AsyncClient):
    """Hit the local server once, print the body, and rate-limit briefly."""
    response = await client.get("http://127.0.0.1:5000/")
    print(response.read())
    await asyncio.sleep(0.1)  # without this would be IP ban


async def main():
    # itertools.cycle over a one-element list: run the same task forever.
    async with httpx.AsyncClient() as client:
        for task_func in itertools.cycle([main_task]):
            await task_func(client)


asyncio.run(main())
Server:
import time

from flask import Flask

app = Flask(__name__)


# BUG FIX: the decorator was mangled into a comment (`#app.route`) by the
# post's formatting; without `@` the route is never registered.
@app.route("/")
def hello():
    """Return the current UNIX timestamp as text."""
    return str(time.time())


app.run()
Output:
b'1639918937.7694323'
b'1639918937.8804302'
b'1639918937.9914327'
b'1639918938.1014295'
b'1639918938.2124324'
b'1639918938.3204308'
...
asyncio.create_task() works as you describe it. The problem you are having here is that you create an infinite loop here:
async def save_subs_loop():
    # BUG (the point of this quoted example): time.sleep() is a blocking
    # call, so control never returns to the event loop and the tasks
    # created here never get a chance to run.
    while True:
        asyncio.create_task(print_subs())
        time.sleep(0.1)  # do not use time.sleep() in async code EVER
save_subs_loop() keeps creating tasks but control is never yielded back to the event loop, because there is no await in there. Try
async def save_subs_loop():
    """Spawn print_subs() every 0.1 s, yielding to the event loop between spawns."""
    while True:
        asyncio.create_task(print_subs())
        # asyncio.sleep (unlike time.sleep) suspends this coroutine, giving
        # the freshly created tasks a chance to actually run.
        await asyncio.sleep(0.1)
This problem is so common I'm thinking python should raise a RuntimeError if it detects time.sleep() within a coroutine :-)
You might want to try the TaskThread framework
It allows you to add tasks in runtime
Tasks are re-scheduled periodically (like in your while loop up there)
There is a consumer / producer framework built in (parent/child relationships) which you seem to need
disclaimer: I wrote TaskThread out of necessity & it's been a life saver.
I would like to have a similar code to the following:
async def func(user_response):
#does_something
while condition:
#waits for a response from the user
#await func(response from user)
However, I want it to wait for another response from the user while the function is executing.
I have tried:
async def func(user_response):
#does_something
while condition:
#waits for a response from the user
#asyncio.create_task(response from user)
However, the problem I have found is that if the user responds twice, both functions will be carried out at the same time (when I want them to essentially be queued).
Can async tasks be queued
Yes, and you can use asyncio.Queue for that purpose:
async def func(user_response):
    # does_something (placeholder for the real handler)
    ...


async def worker(queue):
    """Drain the queue forever, handling one response at a time (FIFO)."""
    while True:
        response = await queue.get()
        await func(response)


async def main():
    queue = asyncio.Queue()
    # Spawn the worker in the "background"; it processes entries sequentially.
    asyncio.create_task(worker(queue))
    while condition:
        # ... wait for a response from the user (placeholder) ...
        # Enqueue the response to be handled by the worker.
        await queue.put(user_response)
I am working on a project that uses the ccxt async library which requires all resources used by a certain class to be released with an explicit call to the class's .close() coroutine. I want to exit the program with ctrl+c and await the close coroutine in the exception. However, it is never awaited.
The application consists of the modules harvesters, strategies, traders, broker, and main (plus config and such). The broker initiates the strategies specified for an exchange and executes them. The strategy initiates the associated harvester which collects the necessary data. It also analyses the data and spawns a trader when there is a profitable opportunity. The main module creates a broker for each exchange and runs it. I have tried to catch the exception at each of these levels, but the close routine is never awaited. I'd prefer to catch it in the main module in order to close all exchange instances.
Harvester
async def harvest(self):
    # Async generator: yields the harvested result for each route in turn.
    if not self.routes:
        # Lazily resolve the route list on first call.
        self.routes = await self.get_routes()
    for route in self.routes:
        self.logger.info("Harvesting route {}".format(route))
        # rateLimit is presumably in milliseconds (hence /1000) — TODO confirm.
        await asyncio.sleep(self.exchange.rateLimit / 1000)
        yield await self.harvest_route(route)
Strategy
async def execute(self):
    # Consume the harvester's async generator and act on each route dict.
    async for route_dct in self.harvester.harvest():
        self.logger.debug("Route dictionary: {}".format(route_dct))
        await self.try_route(route_dct)
Broker
async def run(self):
    # Instantiate each configured strategy class by name, replacing the
    # name entries in self.strategies with strategy instances.
    for strategy in self.strategies:
        self.strategies[strategy] = getattr(
            strategies, strategy)(self.share, self.exchange, self.currency)
    while True:
        try:
            await self.execute_strategies()
        except KeyboardInterrupt:
            # NOTE(review): this handler is never reached — as the answer
            # below explains, KeyboardInterrupt surfaces at the event-loop
            # entry point, not inside a running coroutine.
            await safe_exit(self.exchange)
Main
async def main():
    # Load shared state, build one Broker per exchange, then run them all.
    await load_exchanges()
    await load_markets()
    brokers = [Broker(
        share,
        exchanges[id]["api"],
        currency,
        exchanges[id]["strategies"]
    ) for id in exchanges]
    futures = [broker.run() for broker in brokers]
    # Await each broker as it completes; `executed` ends up holding the
    # result of the last future to finish.
    for future in asyncio.as_completed(futures):
        executed = await future
    # NOTE(review): placement assumed from context (flattened indentation);
    # if `return` were inside the loop, only the first result would be used.
    return executed


if __name__ == "__main__":
    status = asyncio.run(main())
    sys.exit(status)
I had expected the close() coroutine to be awaited, but I still get an error from the library that I must explicitly call it. Where do I catch the exception so that all exchange instances are closed properly?
Somewhere in your code there should be an entry point where the event loop is started.
Usually it is one of functions below:
loop.run_until_complete(main())
loop.run_forever()
asyncio.run(main())
When Ctrl+C happens, KeyboardInterrupt can be caught at this line. When that happens, you can run the event loop again to execute a finalizing coroutine.
This little example shows idea:
import asyncio


async def main():
    # Long-running work the user is expected to interrupt.
    print('Started, press ctrl+C')
    await asyncio.sleep(10)


async def close():
    # Finalizing coroutine to run after the interrupt.
    print('Finalazing...')
    await asyncio.sleep(1)


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(main())
    except KeyboardInterrupt:
        # Ctrl+C lands here, outside the coroutine; the loop is idle again,
        # so it can be reused to run the clean-up coroutine to completion.
        loop.run_until_complete(close())
    finally:
        print('Program finished')
Testing out Sanic, currently have routes that when hit triggers a write to SQS. Trying to make that write asynchronous by adding the info to a queue that is then consumed 'independently' (in a non-blocking manner) of the response returned by the Sanic Server.
Below is the code I've got so far. It makes a call to SQS however it seems I have an error with using the wrong loop / creating multiple loops -> I get an error stating "loop argument must agree with future" and the server just hangs, not returning a response at all.
Also, Sanic uses uvloop, and I'm not sure how / if I should be integrating the queue into a uvloop rather than a separate asyncio loop. The Sanic server is instantiated by passing it a uvloop (uvloop.new_event_loop()).
import asyncio

# BUG FIX: the original read `asyncio.get_event_loop` (no parentheses), which
# binds the function object itself, so `asyncio_loop.create_task(...)` below
# would raise AttributeError. Call it to obtain the actual event loop.
asyncio_loop = asyncio.get_event_loop()
async_queue = asyncio.Queue()
async def consumer_async_queue(q):
    """Consume items until a None sentinel arrives; forward each to SQS."""
    while True:
        item = await q.get()
        if item is None:
            # Sentinel: acknowledge it and stop consuming.
            q.task_done()
            break
        # function call to SQS
        await record_to_sqs(item['json'], item['log_to_meta'])
        q.task_done()
async def producer_async_queue(q, item):
    """Enqueue one work item plus a None sentinel, then wait until both are processed."""
    for payload in (item, None):
        await q.put(payload)
    # join() returns only once the consumer has task_done()-ed every entry.
    await q.join()
async def main(q, item):
    # Schedule producer and consumer on the module-level loop, wait for both.
    producer = asyncio_loop.create_task(producer_async_queue(q, item))
    consumer = asyncio_loop.create_task(consumer_async_queue(q))
    await asyncio.wait([producer, consumer])
async def service():
    # ... other setup (elided in the original post as "* Other Stuff *") ...
    try:
        print(dir(asyncio_loop))
        asyncio_loop.create_task(main(async_queue, item))
        asyncio_loop.run_forever()
    except Exception as e:
        print(e)
        print("ERRORING")
    finally:
        pass
        # asyncio_loop.close()
# BUG FIX: the decorator was mangled into a comment (`#app.route`) by the
# post's formatting; without `@` the handler is never registered.
@app.route('/api/example', methods=['GET'])
async def route(request):
    # NOTE(review): service() is defined above with no parameters but is
    # called here with `request` — one of the two signatures must be wrong.
    return await service(request)