Call coroutine within Thread - python

Is it possible to make a thread's run method async so it can execute coroutines inside it? I realise I am mixing paradigms: I am trying to integrate a third-party library that uses coroutines, whereas my project uses threads. Before I consider migrating my project to coroutines, I'd like to explore executing coroutines within my threads.
Below is my example use case, where I have a thread and want to call a coroutine from within it. My problem is that MyThread::run() doesn't appear to be executing (printing). Is what I am trying possible... and advisable?
from threading import Thread

class MyThread(Thread):
    def __init__(self):
        Thread.__init__(self)
        self.start()

    # def run(self):
    #     while True:
    #         print("MyThread::run() sync")

    async def run(self):
        while True:
            # This line isn't executing/printing
            print("MyThread::run() sync")
            # Call coroutine...
            # volume = await market_place.get_24h_volume()

try:
    t = MyThread()
    while True:
        pass
except KeyboardInterrupt:
    pass

You need to create an asyncio event loop and wait until the coroutine completes.
import asyncio
from threading import Thread

class MyThread(Thread):
    def run(self):
        loop = asyncio.new_event_loop()  # loop = asyncio.get_event_loop()
        loop.run_until_complete(self._run())
        loop.close()
        # asyncio.run(self._run())  # in Python 3.7+

    async def _run(self):
        while True:
            print("MyThread::run() sync")
            await asyncio.sleep(1)
            # OR
            # volume = await market_place.get_24h_volume()

t = MyThread()
t.start()
try:
    t.join()
except KeyboardInterrupt:
    pass
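On Python 3.7+ the manual loop management can be collapsed into asyncio.run(), which creates a fresh event loop in the calling thread and closes it when the coroutine finishes. A minimal sketch of the same pattern (the bounded loop is only there so the demo terminates):

import asyncio
from threading import Thread

class MyThread(Thread):
    def run(self):
        # asyncio.run creates, runs, and closes a new event loop in this thread
        asyncio.run(self._run())

    async def _run(self):
        for _ in range(3):
            print("MyThread::_run() async")
            await asyncio.sleep(1)

t = MyThread()
t.start()
t.join()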

Related

Python asyncio listener loop doesn't run using idle main loop

I have a "listener" loop that constantly watches for items to process from an asyncio queue. This loop runs in a part of the application that is not using asyncio, so I've been trying to set up a passive asyncio main loop that the listener can be handed off to as needed. The listener is started and stopped as needed per input from the user.
For some reason the code below never results in listener() actually running (i.e. print("Listener Running") is never printed). start_IOLoop_thread is run at application startup.
Can anyone point out the problem with this setup? Please let me know if more info is needed.
Edit: replaced the code with a runnable example per the comments:
import asyncio
import threading
from asyncio.queues import Queue
import time

class Client:
    def __init__(self):
        self.streamQ = Queue()
        self.loop = None
        self.start_IOLoop_thread()
        self.stream_listener()

    def stream_listener(self):
        self.streaming = True

        async def listener():
            print("Listener Running")
            while self.streaming:
                data = await self.streamQ.get()
                # DEBUG
                print(data)
            print("Listener Stopped")

        print("Starting Listener")
        self.listener = asyncio.run_coroutine_threadsafe(listener(), self.loop)

    def start_IOLoop_thread(self):
        async def inf_loop():
            # Keep the main thread alive and doing nothing
            # so we can freely give it tasks as needed
            while True:
                await asyncio.sleep(1)

        async def main():
            await inf_loop()

        def start_IO():
            self.loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self.loop)
            asyncio.run(main())
            print("Main Exited")

        threading.Thread(target=start_IO, daemon=True).start()
        # A small delay is needed to give the loop time to initialize,
        # otherwise self.loop is passed as "None"
        time.sleep(0.1)

if __name__ == "__main__":
    C = Client()
    input("Enter to exit")
You never start the newly created loop: asyncio.run() creates and runs its own separate loop, so self.loop is never run, and run_coroutine_threadsafe schedules the listener on a loop that never executes. I adjusted start_IO to run the new loop and schedule main() on it (although here main() does nothing, I assume the original code is more complex). All changes are in start_IO. Tested with Python 3.10 (I think there was some change in the past regarding threads and async).
import asyncio
import threading
from asyncio.queues import Queue
import time

class Client:
    def __init__(self):
        self.streamQ = Queue()
        self.loop = None
        self.start_IOLoop_thread()
        self.stream_listener()

    def stream_listener(self):
        self.streaming = True

        async def listener():
            print("Listener Running")
            while self.streaming:
                data = await self.streamQ.get()
                # DEBUG
                print(data)
            print("Listener Stopped")

        print("Starting Listener")
        self.listener = asyncio.run_coroutine_threadsafe(listener(), self.loop)

    def start_IOLoop_thread(self):
        async def inf_loop():
            # Keep the main thread alive and doing nothing
            # so we can freely give it tasks as needed
            while True:
                await asyncio.sleep(1)

        async def main():
            await inf_loop()

        def start_IO():
            self.loop = asyncio.new_event_loop()
            self.loop.create_task(main())
            asyncio.set_event_loop(self.loop)
            self.loop.run_forever()
            print("Main Exited")

        threading.Thread(target=start_IO, daemon=True).start()
        # A small delay is needed to give the loop time to initialize,
        # otherwise self.loop is passed as "None"
        time.sleep(0.1)

if __name__ == "__main__":
    C = Client()
    input("Enter to exit")
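If the time.sleep(0.1) race bothers you, the loop thread can signal readiness explicitly instead. A minimal sketch using threading.Event; the helper name start_loop_thread is my own:

import asyncio
import threading

def start_loop_thread():
    # Start an event loop on a daemon thread and return it once it is running.
    loop = asyncio.new_event_loop()
    ready = threading.Event()

    def run():
        asyncio.set_event_loop(loop)
        loop.call_soon(ready.set)  # runs as soon as run_forever starts processing callbacks
        loop.run_forever()

    threading.Thread(target=run, daemon=True).start()
    ready.wait()  # blocks until the loop is actually running; no arbitrary sleep
    return loop

run_coroutine_threadsafe can then be called against the returned loop without any startup delay.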

How to run a periodic task and a while loop asynchronously

I have two functions:
i = 0

def update():
    global i
    i += 1

def output():
    print(i)
I want to run output() every 3 seconds and loop update() without any interval, both of course asynchronously.
I tried using asyncio, threading, multithreading and timeloop, but I couldn't get it to work with any of these libraries. If you can figure out how to do it with any of these, or some other library, please help. I'm OK with working with any library.
Using AsyncIO this would resemble:
import asyncio

def update(value):
    value["int"] += 1

def output(value):
    print(value["int"])

async def update_worker(value):
    while True:
        update(value)
        await asyncio.sleep(0)

async def output_worker(value):
    while True:
        output(value)
        await asyncio.sleep(3)

async def main():
    value = {"int": 0}
    await asyncio.gather(
        update_worker(value),
        output_worker(value))

if __name__ == "__main__":
    asyncio.run(main())
Notice that I changed the global value to a shared value passed to the workers, since it is best practice to do so. In other programming languages it would be unsafe to share a value for both reads and writes across multiple concurrent contexts, but since most operations on built-in Python objects are thread-safe, it is OK in this case. Otherwise, you should use a mutex or another concurrency primitive to synchronise reads and writes.
AsyncIO concurrency is based on a cooperative multitasking model, so asynchronous tasks must explicitly yield control to other concurrent tasks when they are waiting for something (marked by the await keyword). Thus, to ensure that output_worker has a chance to run, one must add an await asyncio.sleep(0) in the infinite loop of update_worker so that the AsyncIO event loop can run output_worker.
Here is the same code using multithreading instead of AsyncIO:
from time import sleep
from threading import Thread, Lock

def update(value, lock):
    with lock:
        value["int"] += 1

def output(value, lock):
    with lock:
        print(value["int"])

def update_worker(value, lock):
    while True:
        update(value, lock)

def output_worker(value, lock):
    while True:
        output(value, lock)
        sleep(3)

def main():
    value = {"int": 0}
    lock = Lock()
    t1 = Thread(target=update_worker, args=(value, lock), daemon=True)
    t2 = Thread(target=output_worker, args=(value, lock), daemon=True)
    t1.start()
    t2.start()
    t1.join()
    t2.join()

if __name__ == "__main__":
    main()
Even though it is not strictly necessary in this particular Python program, I used a Lock to synchronize reads and writes, as that is the correct way to handle concurrency in general.

Is it possible to run multiple asyncio event loops at the same time in Python?

Based on the solution I got in Running multiple sockets using asyncio in python,
I tried to add the computation part using asyncio as well.
Setup: Python 3.7.4
import msgpack
import threading
import os
import asyncio
import concurrent.futures
import functools
import numpy as np
import nest_asyncio
nest_asyncio.apply()

class ThreadSafeElem(bytes):
    def __init__(self, *p_arg, **n_arg):
        self._lock = threading.Lock()

    def __enter__(self):
        self._lock.acquire()
        return self

    def __exit__(self, type, value, traceback):
        self._lock.release()

elem = ThreadSafeElem()

async def serialize(data):
    return msgpack.packb(data, use_bin_type=True)

async def serialize1(data1):
    return msgpack.packb(data1, use_bin_type=True)

async def process_data(data, data1):
    loop = asyncio.get_event_loop()
    future = await loop.run_in_executor(None, functools.partial(serialize, data))
    future1 = await loop.run_in_executor(None, functools.partial(serialize1, data1))
    return await asyncio.gather(future, future1)

################ Calculation #############################
def calculate_data():
    global elem
    while True:
        try:
            ...  # data is calculated (some dictionary) ...
            elem, elem1 = asyncio.run(process_data(data, data1))
        except:
            pass
#####################################################################

def get_data():
    return elem

def get_data1():
    return elem1

########### START SERVER AND get data continuously ################
async def client_thread(reader, writer):
    while True:
        try:
            bytes_received = await reader.read(100)
            package_type = np.frombuffer(bytes_received, dtype=np.int8)
            if package_type == 1:
                nn_output = get_data1()
            if package_type == 2:
                nn_output = get_data()
            writer.write(nn_output)
            await writer.drain()
        except:
            pass

async def start_servers(host, port):
    server = await asyncio.start_server(client_thread, host, port)
    await server.serve_forever()

async def start_calculate():
    await asyncio.run(calculate_data())

def enable_sockets():
    try:
        host = '127.0.0.1'
        port = 60000
        sockets_number = 6
        loop = asyncio.get_event_loop()
        for i in range(sockets_number):
            loop.create_task(start_servers(host, port + i))
        loop.create_task(start_calculate())
        loop.run_forever()
    except:
        print("weird exceptions")
##############################################################################

enable_sockets()
The issue is that when I make a call from a client, the server does not give me anything.
I tested the program with dummy data and no asyncio on the calculation part, i.e. without loop.create_task(start_calculate()), and the server responded correctly.
I also ran the calculation without adding it to enable_sockets and it worked. It also runs with this implementation, but the problem is that the server is not returning anything.
I did it like this because I need the calculation part to run continuously, and to return the current data whenever one of the clients calls.
An asyncio event loop cannot be nested inside another, and there is no point in doing so: asyncio.run (and similar) blocks the current thread until done. This does not increase parallelism, and merely disables any outer event loop.
If you want to nest another asyncio task, directly run it in the current event loop. If you want to run a non-cooperative, blocking task, run it in the event loop executor.
async def start_calculate():
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(None, calculate_data)
The default executor uses threads – this allows running blocking tasks, but does not increase parallelism. Use a custom ProcessPoolExecutor to use additional cores:
import concurrent.futures

async def start_calculate():
    loop = asyncio.get_running_loop()
    with concurrent.futures.ProcessPoolExecutor() as pool:
        await loop.run_in_executor(pool, calculate_data)
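For reference, here is a self-contained sketch of that pattern; calculate_data below is a hypothetical stand-in for the real computation:

import asyncio
import concurrent.futures
import time

def calculate_data():
    # blocking work; a real version would loop and publish results
    time.sleep(1)
    return 42

async def main():
    loop = asyncio.get_running_loop()
    with concurrent.futures.ProcessPoolExecutor() as pool:
        result = await loop.run_in_executor(pool, calculate_data)
    print(result)

if __name__ == "__main__":  # the guard is required for ProcessPoolExecutor on Windows/macOS
    asyncio.run(main())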
Why do you call asyncio.run() multiple times?
This function always creates a new event loop and closes it at the end. It should be used as a main entry point for asyncio programs, and should ideally only be called once.
I would advise you to read the docs.
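A quick sketch demonstrating that point: each asyncio.run() call builds and tears down its own loop:

import asyncio

async def current_loop():
    return asyncio.get_running_loop()

first = asyncio.run(current_loop())
second = asyncio.run(current_loop())
print(first is second)    # False: each run() created a separate loop
print(first.is_closed())  # True: run() also closed it on exit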

Python event handler with Async (non-blocking while loop)

import asyncio
import queue

qq = queue.Queue()
qq.put('hi')

class MyApp():
    def __init__(self, q):
        self._queue = q

    def _process_item(self, item):
        print(f'Processing this item: {item}')

    def get_item(self):
        try:
            item = self._queue.get_nowait()
            self._process_item(item)
        except queue.Empty:
            pass

    async def listen_for_orders(self):
        '''
        Asynchronously check the orders queue for new incoming orders
        '''
        while True:
            self.get_item()
            await asyncio.sleep(0)

a = MyApp(qq)
loop = asyncio.get_event_loop()
loop.run_until_complete(a.listen_for_orders())
Using Python 3.6.
I'm trying to write an event handler that constantly listens for messages in the queue and processes them (prints them, in this case). But it must be asynchronous: I need to be able to run it in a terminal (IPython) and manually feed things to the queue (at least initially, for testing).
This code does not work: it blocks forever.
How do I make this run forever but return control after each iteration of the while loop?
Thanks.
Side note: to make the event loop work with IPython (version 7.2), I'm using code from the ib_insync library; I'm using this library for the real-world problem in the example above.
You need to make your queue an asyncio.Queue, and add things to the queue in a thread-safe manner. For example:
import asyncio

qq = asyncio.Queue()

class MyApp():
    def __init__(self, q):
        self._queue = q

    def _process_item(self, item):
        print(f'Processing this item: {item}')

    async def get_item(self):
        item = await self._queue.get()
        self._process_item(item)

    async def listen_for_orders(self):
        '''
        Asynchronously check the orders queue for new incoming orders
        '''
        while True:
            await self.get_item()

a = MyApp(qq)
loop = asyncio.get_event_loop()
loop.run_until_complete(a.listen_for_orders())
Your other thread must put stuff in the queue like this:
loop.call_soon_threadsafe(qq.put_nowait, <item>)
call_soon_threadsafe will ensure correct locking, and also that the event loop is woken up when a new queue item is ready.
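Putting the two sides together, a minimal runnable sketch (assuming Python 3.7+ for asyncio.run); the producer thread here is a hypothetical stand-in for the network-polling thread:

import asyncio
import threading
import time

def producer(loop, q):
    # simulates the thread that polls an external resource
    for n in range(3):
        time.sleep(1)
        loop.call_soon_threadsafe(q.put_nowait, n)

async def main():
    q = asyncio.Queue()  # created inside the running loop
    loop = asyncio.get_running_loop()
    threading.Thread(target=producer, args=(loop, q), daemon=True).start()
    for _ in range(3):
        item = await q.get()
        print(f'Processing this item: {item}')

asyncio.run(main())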
This is not an async queue; you need to use asyncio.Queue:
qq = queue.Queue()
Async is an event loop. You call the loop, transferring control to it, and it loops until your function completes, which never happens:
loop.run_until_complete(a.listen_for_orders())
You commented:
I have another Thread that polls an external network resource for data (I/O intensive) and dumps the incoming messages into this thread.
Write that code async - so you'd have:
async def run():
    while True:
        item = await get_item_from_network()
        process_item(item)

loop = asyncio.get_event_loop()
loop.run_until_complete(run())
If you don't want to do that, what you can do is step through the event loop manually, though this is generally not recommended:
import asyncio

def run_once(loop):
    loop.call_soon(loop.stop)
    loop.run_forever()

loop = asyncio.get_event_loop()
for x in range(100):
    print(x)
    run_once(loop)
Then you simply schedule your async function, and each time you call run_once the loop checks your asyncio queue and passes control to your listen_for_orders function if the queue has an item in it, as sketched below.
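A minimal sketch of that stepping approach with an asyncio.Queue (the listener coroutine stands in for listen_for_orders above):

import asyncio

async def listen_for_orders(q):
    while True:
        item = await q.get()
        print(f'Processing this item: {item}')

def run_once(loop):
    loop.call_soon(loop.stop)
    loop.run_forever()

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
q = asyncio.Queue()
loop.create_task(listen_for_orders(q))

q.put_nowait('hi')  # feed the queue manually, e.g. from an IPython prompt
run_once(loop)      # the listener wakes, prints the item, then suspends again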

How to run a coroutine and wait for its result from a sync func when the loop is running?

I have code like the following:
import asyncio

def render():
    loop = asyncio.get_event_loop()

    async def test():
        await asyncio.sleep(2)
        print("hi")
        return 200

    if loop.is_running():
        result = asyncio.ensure_future(test())
    else:
        result = loop.run_until_complete(test())
When the loop is not running it's quite easy: just use loop.run_until_complete and it returns the coroutine's result. But if the loop is already running (my blocking code runs in an app which is already running the loop), I cannot use loop.run_until_complete since it will raise an exception; when I call asyncio.ensure_future the task gets scheduled and runs, but I want to wait there for the result. Does anybody know how to do this? The docs are not very clear on it.
I tried passing a concurrent.futures.Future, calling set_result inside the coro and then calling Future.result() from my blocking code, but it doesn't work: it blocks there and doesn't let anything else run. Any help would be appreciated.
To implement render() with the proposed design, you would need a way to single-step the event loop from a callback running inside it. Asyncio explicitly forbids recursive event loops, so this approach is a dead end.
Given that constraint, you have two options:
make render() itself a coroutine;
execute render() (and its callers) in a thread different from the one that runs the asyncio event loop.
Assuming #1 is out of the question, you can implement the #2 variant of render() like this:
def render():
    loop = _event_loop  # can't call get_event_loop()

    async def test():
        await asyncio.sleep(2)
        print("hi")
        return 200

    future = asyncio.run_coroutine_threadsafe(test(), loop)
    result = future.result()
Note that you cannot use asyncio.get_event_loop() in render because the event loop is not (and should not be) set for that thread. Instead, the code that spawns the runner thread must call asyncio.get_event_loop() and send it to the thread, or just leave it in a global variable or a shared structure.
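A minimal end-to-end sketch of this arrangement, where the main thread runs the loop and publishes it in a global while render() executes on a worker thread:

import asyncio
import threading

_event_loop = None  # filled in by the thread that owns the loop

def render():
    async def test():
        await asyncio.sleep(2)
        print("hi")
        return 200
    future = asyncio.run_coroutine_threadsafe(test(), _event_loop)
    return future.result()  # blocks this thread until test() finishes

def worker():
    print("render() returned:", render())

async def main():
    global _event_loop
    _event_loop = asyncio.get_running_loop()
    threading.Thread(target=worker, daemon=True).start()
    await asyncio.sleep(3)  # keep the loop alive long enough for the demo

asyncio.run(main())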
Waiting Synchronously for an Asynchronous Coroutine
If an asyncio event loop is already running via loop.run_forever, it will block the executing thread until loop.stop is called [see the docs]. Therefore, the only way to wait synchronously is to run the event loop on a dedicated thread, schedule the asynchronous function on the loop, and wait for it synchronously from another thread.
For this I have composed my own minimal solution following the answer by user4815162342. I have also added the parts for cleaning up the loop when all work is finished [see loop.close].
The main function in the code below runs the event loop on a dedicated thread and schedules several tasks on the event loop, plus the task whose result is to be awaited synchronously. The synchronous wait will block until the desired result is ready. Finally, the loop is closed and cleaned up gracefully along with its thread.
The dedicated thread and the functions stop_loop, run_forever_safe, and await_sync can be encapsulated in a module or a class.
For thread-safety considerations, see the section "Concurrency and Multithreading" in the asyncio docs.
import asyncio
import threading

#----------------------------------------
def stop_loop(loop):
    ''' stops an event loop '''
    loop.stop()
    print(".: LOOP STOPPED:", loop.is_running())

def run_forever_safe(loop):
    ''' run a loop forever and clean up after being stopped '''
    loop.run_forever()
    # NOTE: loop.run_forever returns after calling loop.stop
    #-- cancel all tasks and close the loop gracefully
    print(".: CLOSING LOOP...")
    # source: https://xinhuang.github.io/posts/2017-07-31-common-mistakes-using-python3-asyncio.html
    loop_tasks_all = asyncio.all_tasks(loop=loop)  # asyncio.Task.all_tasks was removed in Python 3.9
    for task in loop_tasks_all:
        task.cancel()
    # NOTE: `cancel` does not guarantee that the Task will be cancelled
    for task in loop_tasks_all:
        if not (task.done() or task.cancelled()):
            try:
                # wait for task cancellations
                loop.run_until_complete(task)
            except asyncio.CancelledError:
                pass
    print(".: ALL TASKS CANCELLED.")
    loop.close()
    print(".: LOOP CLOSED:", loop.is_closed())

def await_sync(task):
    ''' synchronously waits for a task '''
    result = task.result()  # concurrent.futures.Future.result blocks until done
    print(".: AWAITED TASK DONE")
    return result

#----------------------------------------
async def asyncTask(loop, k):
    ''' asynchronous task '''
    print("--start async task %s" % k)
    await asyncio.sleep(3)  # the loop= parameter was removed in Python 3.10
    print("--end async task %s." % k)
    key = "KEY#%s" % k
    return key

def main():
    loop = asyncio.new_event_loop()  # construct a new event loop
    #-- closures for running and stopping the event loop
    run_loop_forever = lambda: run_forever_safe(loop)
    close_loop_safe = lambda: loop.call_soon_threadsafe(stop_loop, loop)
    #-- make a dedicated thread for running the event loop
    thread = threading.Thread(target=run_loop_forever)
    #-- add some tasks along with my particular task
    myTask = asyncio.run_coroutine_threadsafe(asyncTask(loop, 100200300), loop=loop)
    otherTasks = [asyncio.run_coroutine_threadsafe(asyncTask(loop, i), loop=loop)
                  for i in range(1, 10)]
    #-- begin the thread to run the event loop
    print(".: EVENT-LOOP THREAD START")
    thread.start()
    #-- _synchronously_ wait for the result of my task
    result = await_sync(myTask)  # blocks until the task is done
    print("* final result of my task:", result)
    #... do lots of work ...
    print("*** ALL WORK DONE ***")
    #========================================
    # close the loop gracefully when everything is finished
    close_loop_safe()
    thread.join()
#----------------------------------------
main()
Here is my case: my whole program is async, but it calls a sync library which then calls back into my async function.
This follows the answer by user4815162342.
import asyncio

async def asyncTask(k):
    ''' asynchronous task '''
    print("--start async task %s" % k)
    await asyncio.sleep(3)
    print("--end async task %s." % k)
    key = "KEY#%s" % k
    return key

def my_callback():
    print("here i want to call my async func!")
    future = asyncio.run_coroutine_threadsafe(asyncTask(1), LOOP)
    return future.result()

def sync_third_lib(cb):
    print("here will call back to your code...")
    cb()

async def main():
    print("main start...")
    print("call sync third lib ...")
    await asyncio.to_thread(sync_third_lib, my_callback)  # Python 3.9+
    # pre-3.9 alternative:
    # await LOOP.run_in_executor(None, sync_third_lib, my_callback)
    print("another work...keep async...")
    await asyncio.sleep(2)
    print("done!")

LOOP = asyncio.get_event_loop()
LOOP.run_until_complete(main())
