Stop Python from Terminating As Long As Background Async Task is Running

I've been looking around and have seen a lot of information regarding asyncio. I'm having trouble creating a program that won't terminate as long as the background task is running.
def loop_test():
    print("task is running")
    time.sleep(2)
    print("task is finished")

async def start_pipeline(self):
    print("Starting TD Stream")

    # Build data pipeline
    await self.td_stream_client.build_pipeline()
    data_response_count = 0
    self.streaming = True

    # Keep going while receiving data
    while self.streaming:
        print("Streaming")
        data = await self.td_stream_client.start_pipeline()

        # Parse if data inside
        if 'data' in data:
            content = data['data'][0]['content']
            print("Key: {}".format(content[0]['key']))
            pprint.pprint(content, indent=4)
            print('-' * 80)
            data_response_count += 1

    print("Done with while loop")

async def main():
    _ = asyncio.create_task(td_stream_client.start_pipeline())
    coro = asyncio.to_thread(TDA_Streaming.loop_test)
    await coro

asyncio.run(main())
The idea of the program is to have a background task that streams data from an API to my program. While this is happening, I want to be able to do other things. Maybe have manual input...maybe have a GUI where I can interact with things.
The issue is that my program terminates as soon as the master thread finishes. How do I prevent this from happening? If I have a while loop with an "input" call, this input blocks the program. What is the best way to proceed?

Please wait for the background task explicitly:
def loop_test():
    print("task is running")
    time.sleep(2)
    print("task is finished")

async def start_pipeline(self):
    print("Starting TD Stream")

    # Build data pipeline
    await self.td_stream_client.build_pipeline()
    data_response_count = 0
    self.streaming = True

    # Keep going while receiving data
    while self.streaming:
        print("Streaming")
        data = await self.td_stream_client.start_pipeline()

        # Parse if data inside
        if 'data' in data:
            content = data['data'][0]['content']
            print("Key: {}".format(content[0]['key']))
            pprint.pprint(content, indent=4)
            print('-' * 80)
            data_response_count += 1

    print("Done with while loop")

async def main():
    background_task = asyncio.create_task(td_stream_client.start_pipeline())
    coro = asyncio.to_thread(TDA_Streaming.loop_test)
    await coro
    await background_task

asyncio.run(main())
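As a side note on the "input blocks the program" part of the question: here is a minimal sketch (an illustration, not part of the original answer) of running the blocking input() call in a worker thread with asyncio.to_thread (Python 3.9+), so the event loop and the background task keep running while waiting for keyboard input. The stream() coroutine is a stand-in for the real pipeline:

import asyncio

async def stream():
    # Stand-in for the real streaming pipeline
    while True:
        print("streaming...")
        await asyncio.sleep(1)

async def main():
    background_task = asyncio.create_task(stream())
    # input() blocks, so run it in a thread; the loop keeps servicing tasks
    while (cmd := await asyncio.to_thread(input, "> ")) != "quit":
        print("got command:", cmd)
    background_task.cancel()

asyncio.run(main())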

Related

Python update queue with several asyncio

I want to update a Queue with several asyncio tasks.
I receive data from each of A, B, and C (using websockets and a "while True" loop) and then want to put it in the queue, so that all of the providers can write to the same Queue.
(I know that maybe I need to use multithreading or something else, but I haven't found the right way.)
if __name__ == '__main__':
    global_queue = queue.Queue()
    asyncio.run(A_Orderbook.data_stream(global_queue))
    asyncio.run(B_Orderbook.data_stream(global_queue))
    asyncio.run(C_Orderbook.data_stream(global_queue))
    print(global_queue.qsize())
Thanks.
You can do it the following way:
import asyncio

async def worker(worker_name: str, q: asyncio.Queue):
    """Produces tasks for consumer."""
    for i in range(1, 6):
        await asyncio.sleep(1)
        await q.put(f"{worker_name}-{i}")

async def consumer(q: asyncio.Queue):
    """Consumes tasks from workers."""
    while True:
        item = await q.get()
        await asyncio.sleep(1)
        print(item)
        # we need it to ensure that all tasks were done
        q.task_done()

async def main_wrapper():
    """Main function - entry point of our async app."""
    q = asyncio.Queue()
    # we do not need to await the asyncio task, it is run in "parallel"
    asyncio.create_task(consumer(q))
    await asyncio.gather(*[worker(f"w{i}", q) for i in range(1, 5)])  # create worker-tasks
    await q.join()  # wait until the consumer has consumed all queued tasks
    print("All DONE !")

if __name__ == '__main__':
    asyncio.run(main_wrapper())
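Note the key difference from the question's code: each asyncio.run() call starts a fresh event loop and blocks until its coroutine finishes, so the three data_stream() calls would only ever run one after another (or the first would run forever), never concurrently. Running everything under a single asyncio.run() and combining the producers with asyncio.gather(), as above, puts them on one shared loop; asyncio.Queue replaces queue.Queue so that put()/get() can be awaited cooperatively on that loop.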

Processing queue with asyncio only fires after all items are placed in queue using asyncio in python

I am trying to create some code in Python that will put data from a generator (currently a simple counting loop, but it will be sensor data at some point) into a queue. Once it is in the queue, I want to pull data off of it and send it over a TCP connection. This seemed like a great time to use asyncio, but I am doing something wrong.
Currently, the script processes all the numbers and does not return anything. Ideally I would want to make sure I always have something in the queue so it never empties, and to send a set amount of data, say five numbers, every time. How can I achieve this?
import asyncio
import random

class responder():
    def __init__(self, parent=None):
        super().__init__()

    async def produce(self, queue, n):
        for x in range(n):
            # produce an item
            print('producing {}/{}'.format(x, n))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            item = str(x)
            # put the item in the queue
            await queue.put(item)

    async def consume(self, queue):
        while True:
            # wait for an item from the producer
            item = await queue.get()
            # process the item
            print('consuming {}...'.format(item))
            # simulate i/o operation using sleep
            await asyncio.sleep(random.random())
            # Notify the queue that the item has been processed
            queue.task_done()

    async def run(self, n):
        queue = asyncio.Queue()
        # schedule the consumer
        self.consumer = asyncio.ensure_future(self.consume(queue))
        # run the producer and wait for completion
        await self.produce(queue, n)
        # wait until the consumer has processed all items
        await queue.join()
        # the consumer is still awaiting an item, cancel it
        self.consumer.cancel()

    async def handle_echo(self, reader, writer):
        data = await reader.read(100)
        message = data.decode()
        addr = writer.get_extra_info('peername')
        print("Received %r from %r" % (message, addr))
        if (message == 'START_RUN'):
            data = await self.run(10)
            print("Send: %i" % data)
            writer.write(data)
            await writer.drain()
        else:
            print("Send: %r" % message)
            writer.write(message)
            await writer.drain()
        print("Close the client socket")
        writer.close()

    def launch_server(self):
        self.loop = asyncio.get_event_loop()
        self.coro = asyncio.start_server(self.handle_echo, '127.0.0.1', 7780, loop=self.loop)
        self.server = self.loop.run_until_complete(self.coro)
        # Serve requests until Ctrl+C is pressed
        print('Serving on {}'.format(self.server.sockets[0].getsockname()))
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            # Close the server
            self.server.close()
            self.loop.run_until_complete(self.server.wait_closed())
            self.loop.close()

def main():
    server = responder()
    server.launch_server()

if __name__ == '__main__':
    main()
The code generates the number stream, but it runs through the entire list before moving on. Further, I never get a value back.
My client code (which never gets anything back):
import asyncio

async def capture_stream(reader):
    while not reader.at_eof:
        data = await reader.read(100)
        print(f'{who} received {len(data)} bytes')

async def tcp_echo_client(message, loop):
    reader, writer = await asyncio.open_connection('127.0.0.1', 7780, loop=loop)
    print('Send: %r' % message)
    writer.write(message.encode())
    if (message == "START_RUN"):
        data = await reader.read(100)
        print('Received: %r' % data.decode())
    else:
        collect_data = asyncio.create_task(capture_stream)
        data = await collect_data
    print('Close the socket')
    writer.close()

message = 'START_RUN'
loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client(message, loop))
loop.close()
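Regarding the "send a set amount of data, say five numbers, every time" part of the question: here is a minimal sketch (an illustration, not from the original post; send_batch and BATCH_SIZE are made-up names) of draining a fixed-size batch from an asyncio.Queue before each write, so the consumer sends groups of five instead of one item at a time:

import asyncio

BATCH_SIZE = 5  # illustrative: how many items to send per write

async def send_batch(queue: asyncio.Queue, writer: asyncio.StreamWriter):
    # Collect BATCH_SIZE items (waiting as needed), then send them in one write
    batch = []
    for _ in range(BATCH_SIZE):
        item = await queue.get()
        batch.append(item)
        queue.task_done()
    writer.write((",".join(batch) + "\n").encode())
    await writer.drain()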

Python - Cancel task in asyncio?

I have written code for an async pool below. In __aexit__ I'm cancelling the _worker tasks after the queued tasks are finished. But when I run the code, the worker tasks are not getting cancelled and the code runs forever. This is what such a task looks like: <Task pending coro=<AsyncPool._worker() running at \async_pool.py:17> wait_for=<Future cancelled>>. The asyncio.wait_for is getting cancelled, but not the worker tasks.
class AsyncPool:
    def __init__(self, coroutine, no_of_workers, timeout):
        self._loop = asyncio.get_event_loop()
        self._queue = asyncio.Queue()
        self._no_of_workers = no_of_workers
        self._coroutine = coroutine
        self._timeout = timeout
        self._workers = None

    async def _worker(self):
        while True:
            try:
                ret = False
                queue_item = await self._queue.get()
                ret = True
                result = await asyncio.wait_for(self._coroutine(queue_item), timeout=self._timeout, loop=self._loop)
            except Exception as e:
                print(e)
            finally:
                if ret:
                    self._queue.task_done()

    async def push_to_queue(self, item):
        self._queue.put_nowait(item)

    async def __aenter__(self):
        assert self._workers == None
        self._workers = [asyncio.create_task(self._worker()) for _ in range(self._no_of_workers)]
        return self

    async def __aexit__(self, type, value, traceback):
        await self._queue.join()
        for worker in self._workers:
            worker.cancel()
        await asyncio.gather(*self._workers, loop=self._loop, return_exceptions=True)
To use the AsyncPool:
async def something(item):
    print("got", item)
    await asyncio.sleep(item)

async def main():
    async with AsyncPool(something, 5, 2) as pool:
        for i in range(10):
            await pool.push_to_queue(i)

asyncio.run(main())
The Output in my terminal:
The problem is that your except Exception exception clause also catches cancellation, and ignores it. To add to the confusion, print(e) just prints an empty line in case of a CancelledError, which is where the empty lines in the output come from. (Changing it to print(type(e)) shows what's going on.)
To correct the issue, change except Exception to something more specific, like except asyncio.TimeoutError. This change is not needed in Python 3.8 where asyncio.CancelledError no longer derives from Exception, but from BaseException, so except Exception doesn't catch it.
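For illustration, a minimal sketch of _worker with the narrower except clause this answer suggests (the deprecated loop= argument to asyncio.wait_for is also dropped here; everything else is unchanged from the question):

    async def _worker(self):
        while True:
            try:
                ret = False
                queue_item = await self._queue.get()
                ret = True
                # catch only the timeout; let CancelledError propagate
                result = await asyncio.wait_for(self._coroutine(queue_item), timeout=self._timeout)
            except asyncio.TimeoutError as e:
                print(e)
            finally:
                if ret:
                    self._queue.task_done()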
When an asyncio task is created and then cancelled, the task object is still alive and needs to be "reclaimed" by awaiting it. However, once you await such a cancelled task, since it will never give you back the expected return value, asyncio.CancelledError will be raised, and you need to catch it somewhere.
Because of this behavior, I don't think you should gather them; instead, await each of the cancelled tasks individually, as they are supposed to return right away:
async def __aexit__(self, type, value, traceback):
    await self._queue.join()
    for worker in self._workers:
        worker.cancel()
    for worker in self._workers:
        try:
            await worker
        except asyncio.CancelledError:
            print("worker cancelled:", worker)
This appears to work. The event is a countdown timer, and when it expires it cancels the tasks.
import asyncio
from datetime import datetime as dt
from datetime import timedelta as td
import random
import time

class Program:
    def __init__(self):
        self.duration_in_seconds = 20
        self.program_start = dt.now()
        self.event_has_expired = False
        self.canceled_success = False

    async def on_start(self):
        print("On Start Event Start! Applying Overrides!!!")
        await asyncio.sleep(random.randint(3, 9))

    async def on_end(self):
        print("On End Releasing All Overrides!")
        await asyncio.sleep(random.randint(3, 9))

    async def get_sensor_readings(self):
        print("getting sensor readings!!!")
        await asyncio.sleep(random.randint(3, 9))

    async def evauluate_data(self):
        print("checking data!!!")
        await asyncio.sleep(random.randint(3, 9))

    async def check_time(self):
        if (dt.now() - self.program_start > td(seconds=self.duration_in_seconds)):
            self.event_has_expired = True
            print("Event is DONE!!!")
        else:
            print("Event is not done! ", dt.now() - self.program_start)

    async def main(self):
        # script starts, do only once self.on_start()
        await self.on_start()
        print("On Start Done!")
        while not self.canceled_success:
            readings = asyncio.ensure_future(self.get_sensor_readings())
            analysis = asyncio.ensure_future(self.evauluate_data())
            checker = asyncio.ensure_future(self.check_time())
            if not self.event_has_expired:
                await readings
                await analysis
                await checker
            else:
                # close other tasks before final shutdown
                readings.cancel()
                analysis.cancel()
                checker.cancel()
                self.canceled_success = True
                print("cancelled hit!")
        # script ends, do only once self.on_end() when event is done
        await self.on_end()
        print('Done Deal!')

async def main():
    program = Program()
    await program.main()

asyncio.run(main())  # entry point (not shown in the original snippet)

Python non-blocking Asyncio

I'm trying to create a class which allows me to put data into it while a websocket connection is open, but I can't figure out how to make the main call to the class non-blocking. Can someone point me in the right direction?
Here is what I have so far (some extraneous code removed):
class Audio_Sender:
    def __init__(self, IP_Address):
        self.Remote_IP_Address = IP_Address
        self.audio_queue = queue.Queue(10)  # Max of 10 items

    async def Connect(self):
        uri = "ws://127.0.0.1:8765"
        async with websockets.connect(uri) as websocket:
            await websocket.send(json_voice_start)
            while self.status == "Run":
                if not self.audio_queue.empty():
                    audio_data = self.audio_queue.get()
                    await websocket.send(audio_data)
            # pull any remaining data out:
            while not self.audio_queue.empty():
                audio_data = self.audio_queue.get()
                await websocket.send(audio_data)
            await websocket.send(json_voice_stop)
            voice_response = await websocket.recv()
            message = json.loads(voice_response)
            print("\t- " + message["result"])

    async def run_connect(self):
        task = asyncio.create_task(self.Connect())
        while not task.done():
            print("Task is not Done")
            await asyncio.sleep(1)

    def go(self):
        asyncio.run(self.run_connect())

# Create the Audio Sender
A = Audio_Sender("127.0.0.1", "r")

# Put some data into its queue
A.audio_queue.put(b"abc")
A.audio_queue.put(b"abc")
A.audio_queue.put(b"abc")

# Finished putting data in
A.status = "Done"

# Now send the data.
# Ideally I would like to have the go() part way through the queuing of
# data (above), but A.go() is blocking.. how to make it not blocking?
A.go()
So, playing with this a bit more, I realized I could use a thread for the go(), which works.
Updated code is as follows:
import threading

# Create the Audio Sender
A = Audio_Sender("127.0.0.1", "r")
s = threading.Thread(target=A.go, args=())
s.start()

i = 0
while i < 10:
    # Put some data into its queue
    A.audio_queue.put(b"abc")
    i = i + 1
    print(i)

# Finished putting data in
A.status = "Done"
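One reason this works: queue.Queue (unlike asyncio.Queue) is thread-safe, so the main thread can put items while the event loop running inside the worker thread drains them. That is also why Connect() polls with empty()/get() rather than awaiting; with an asyncio.Queue you would await get() instead, but you could then no longer fill the queue directly from another thread.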

run async while loop independently

Is it possible to run an async while loop independently of another one?
Instead of the actual code, I isolated the issue I am having in the following example code:
import asyncio, time

class Time:
    def __init__(self):
        self.start_time = 0

    async def dates(self):
        while True:
            t = time.time()
            if self.start_time == 0:
                self.start_time = t
            yield t
            await asyncio.sleep(1)

    async def printer(self):
        while True:
            print('looping')  # always called
            await asyncio.sleep(self.interval)

    async def init(self):
        async for i in self.dates():
            if i == self.start_time:
                self.interval = 3
                await self.printer()
            print(i)  # Never Called

loop = asyncio.get_event_loop()
t = Time()
loop.run_until_complete(t.init())
Is there a way to have the print function run independently so print(i) gets called each time?
What it should do is print(i) each second, and every 3 seconds call self.printer().
Essentially self.printer is a separate task that does not need to be called very often, only every x seconds (in this case 3).
In JavaScript the solution is to do something like so
setInterval(printer, 3000);
EDIT: Ideally self.printer would also be able to be canceled / stopped if a condition or stopping function is called
The asyncio equivalent of JavaScript's setTimeout would be asyncio.ensure_future:
import asyncio

async def looper():
    for i in range(1_000_000_000):
        print(f'Printing {i}')
        await asyncio.sleep(0.5)

async def main():
    print('Starting')
    future = asyncio.ensure_future(looper())
    print('Waiting for a few seconds')
    await asyncio.sleep(4)
    print('Cancelling')
    future.cancel()
    print('Waiting again for a few seconds')
    await asyncio.sleep(2)
    print('Done')

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(main())
You'd want to register your self.printer() coroutine as a separate task; pass it to asyncio.ensure_future() rather than awaiting it directly:
asyncio.ensure_future(self.printer())
By passing the coroutine to asyncio.ensure_future(), you put it on the list of events that the loop switches between as each awaits on further work to be completed.
With that change, your test code outputs:
1516819094.278697
looping
1516819095.283424
1516819096.283742
looping
1516819097.284152
# ... etc.
Tasks are the asyncio equivalent of threads in a multithreading scenario.
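To address the EDIT in the question (a cancellable interval): a minimal sketch, an illustration rather than part of either answer, of a setInterval-style helper built on asyncio.create_task; the name set_interval is made up:

import asyncio

def set_interval(coro_factory, seconds):
    # Call coro_factory() every `seconds` seconds until the task is cancelled
    async def _runner():
        while True:
            await asyncio.sleep(seconds)
            await coro_factory()
    return asyncio.create_task(_runner())

async def demo():
    async def tick():
        print('looping')
    task = set_interval(tick, 3)  # fires every 3 seconds
    await asyncio.sleep(10)       # do other work here
    task.cancel()                 # the stop condition from the EDIT

asyncio.run(demo())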