Python aiohttp and asyncio without an event loop

My understanding is that an "async def" coroutine needs to be run on an event loop, e.g.
loop = asyncio.get_event_loop()
loop.run_until_complete(<method>)
I created some code below without the loop. It still supports 100 async calls without an issue. Did I miss anything?
Dummy server sleeps 5 seconds.
from aiohttp import web
import asyncio
import time

async def hello(request):
    #time.sleep(1)
    await asyncio.sleep(5)
    return web.Response(text='dummy done')

app = web.Application()
app.add_routes([web.get('/', hello)])
web.run_app(app, host='127.0.0.1', port=8081)
Actual server taking requests.
import json
from aiohttp import web
import aiohttp
import asyncio

n = 0

def mcowA(n):
    print(n, " : A")
    return

async def fetch(session, url):
    async with getattr(session, "get")(url) as response:
        return await response.text()

def mcowB(n):
    print(n, " : B")
    return

async def runMcows(request):
    global n
    n = n + 1
    mcowA(n)
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, 'http://localhost:8081')
        print(n, html)
    mcowB(n)
    return web.Response(text=html)

try:
    app = web.Application()
    app.add_routes([web.get('/', runMcows)])
    #loop = asyncio.get_event_loop(web.run_app(app))
    #loop.run_forever()
    web.run_app(app)
finally:
    loop.close()
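For reference, web.run_app sets up and runs an event loop internally, which is why no explicit run_until_complete call appears above. A minimal sketch of the explicit-loop equivalent, using aiohttp's lower-level AppRunner/TCPSite API (the serve() helper, host, and port here are illustrative, not part of the original question):

import asyncio
from aiohttp import web

async def serve(app, host='127.0.0.1', port=8080):
    # AppRunner/TCPSite is the lower-level API that web.run_app wraps
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, host, port)
    await site.start()
    while True:                # keep the server alive until the loop is stopped
        await asyncio.sleep(3600)

# app = web.Application() ... add routes as above ...
# loop = asyncio.get_event_loop()
# loop.run_until_complete(serve(app))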

Related

Best way to handle 2 websocket connections at the same time

I am handling data from 2 websocket servers and I would like to know the fastest way to handle both connections at the same time, given that the first connection sends data every 0.1-10 ms.
What I am doing so far is:
import asyncio
import json
import websockets

async def run():
    async with websockets.connect("ws://localhost:8546/") as ws1:
        async with websockets.connect(uri="wss://api.blxrbdn.com/ws", extra_headers={"Authorization": "apikey"}) as ws2:
            sub1 = await ws1.send("subscription 1")
            sub2 = await ws2.send("subscription 2")
            while True:
                try:
                    msg1 = await ws1.recv()
                    msg1 = json.loads(msg1)
                    msg2 = await ws2.recv()
                    msg2 = json.loads(msg2)
                    # process msg1 & msg2
                except Exception as e:
                    print(e, flush=True)

asyncio.run(run())
As stated in the comments, try to handle each connection in its own coroutine. Here is a small example:
import asyncio
import websockets

async def worker(ws, msg, t):
    while True:
        sub = await ws.send(msg)
        print("Received from the server:", await ws.recv())
        await asyncio.sleep(t)

async def run():
    url1 = "ws://localhost:8765/"
    url2 = "ws://something_different:8765/"
    async with websockets.connect(url1) as ws1, websockets.connect(url2) as ws2:
        await asyncio.gather(worker(ws1, "sub1", 1), worker(ws2, "sub2", 2))

asyncio.run(run())
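If the messages from both connections need to be processed in one place, a common variation (not from the original answer; the reader() helper and the second URL are placeholders) is to have each reader push into a shared asyncio.Queue and consume from it in the main coroutine:

import asyncio
import json
import websockets

async def reader(ws, name, queue):
    # forward every message from this connection into the shared queue
    while True:
        msg = await ws.recv()
        await queue.put((name, json.loads(msg)))

async def run():
    queue = asyncio.Queue()
    async with websockets.connect("ws://localhost:8546/") as ws1, \
               websockets.connect("ws://localhost:8547/") as ws2:  # placeholder URL
        t1 = asyncio.create_task(reader(ws1, "ws1", queue))
        t2 = asyncio.create_task(reader(ws2, "ws2", queue))
        while True:
            name, msg = await queue.get()   # messages arrive as soon as either socket delivers one
            print(name, msg)

asyncio.run(run())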

return a value from while loop when using asyncio function

I am trying to connect to and receive messages from multiple websockets concurrently.
For this purpose I built it with asyncio, and it prints messages correctly. But the problem is that I can only print the value, not return it.
A simplified example of the pseudo code I am struggling with is below:
import asyncio
import websockets
import json

symbols_id = [1, 2]

## LOOP RUNNING EXAMPLE OF ASYNCIO
async def get_connect(symbols_id):
    tasks = []
    for _id in symbols_id:
        print('connection to', _id)
        if _id == 1:
            a = 0
        elif _id == 2:
            a = 200
        tasks.append(asyncio.create_task(_loop(a)))
    return tasks

async def _loop(a):
    while True:
        print(a)
        a += 1
        await asyncio.sleep(2.5)

async def ping_func():
    while True:
        print('------ ping')
        await asyncio.sleep(5)

async def main():
    tasks = await get_connect(symbols_id)
    asyncio.create_task(ping_func())
    await asyncio.gather(*tasks)

asyncio.run(main())
As you can see from the code above, I used print(a) to print a in each loop.
I tried return a instead of print(a) but it was not helpful.
Thanks.
yield a? return a will exit the function and the loop; yield is usually what you want in asyncio for looped tasks.
Finally I found the way: using yield and async for to read the data in each loop.
It works correctly after changing the code to the following.
import asyncio
import websockets
import json

symbols_id = [1, 2]
global a
a = 0

## LOOP RUNNING EXAMPLE OF ASYNCIO
async def get_connect(symbols_id):
    tasks = []
    for _id in symbols_id:
        print('connection to', _id)
        if _id == 1:
            a = 0
        elif _id == 2:
            a = 200
        tasks.append(asyncio.create_task(_loop(a)))
    return tasks

async def _loop(param):
    global a
    a = param
    while True:
        print(a)
        a += 1
        await asyncio.sleep(2.5)

async def ping_func():
    while True:
        print('------ ping')
        await asyncio.sleep(5)

async def get_result():
    global a
    while True:
        yield a
        await asyncio.sleep(1)

async def main():
    tasks = await get_connect(symbols_id)
    asyncio.create_task(ping_func())
    async for x in get_result():
        print(x)
    await asyncio.gather(*tasks)

asyncio.run(main())
I was confused about how to use the data generated in this code snippet inside another code snippet. What I found is:
1- The generated data can be accessed through global variables.
2- By defining a class and a property, it can be accessed from every part of the code (a sketch of this follows below).
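A minimal sketch of the second option, using a hypothetical SharedState class (not part of the original code): the looping coroutine keeps updating an attribute, and a property exposes the latest value to the rest of the program.

import asyncio

class SharedState:
    def __init__(self):
        self._value = 0

    @property
    def value(self):
        # latest value produced by the looping coroutine
        return self._value

    async def producer(self):
        # stands in for the websocket loop that keeps updating the value
        while True:
            self._value += 1
            await asyncio.sleep(2.5)

    async def consumer(self):
        while True:
            print('latest:', self.value)
            await asyncio.sleep(1)

async def main():
    state = SharedState()
    await asyncio.gather(state.producer(), state.consumer())

asyncio.run(main())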

With PyTest, run_until_complete continues without finishing

Basically, what it does is make 20 async requests to Google.
If I launch it without PyTest, just as a snippet of code like this, it works:
import asyncio
import aiohttp

async def get(
    session: aiohttp.ClientSession,
) -> dict:
    url = f"https://www.google.com/"
    resp = await session.request('GET', url=url)
    data = await resp.json()
    return data

async def sessions():
    async with aiohttp.ClientSession() as session:
        tasks = []
        for i in range(20):
            tasks.append(get(session=session))
        return await asyncio.gather(*tasks, return_exceptions=True)

def main():
    loop = asyncio.new_event_loop()
    try:
        asyncio.set_event_loop(loop)
        htmls = loop.run_until_complete(sessions())
    finally:
        loop.close()
    print(htmls)
But when I use PyTest, despite it being (almost) the same code, the "htmls" variable at the end is never assigned a value:
import aiohttp
import asyncio

async def get(
    session: aiohttp.ClientSession,
) -> dict:
    url = f"https://www.google.com/"
    resp = await session.request('GET', url=url)
    data = await resp.json()
    return data

async def sessions(self):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for i in range(20):
            tasks.append(self.get(session=session))
        return await asyncio.gather(*tasks, return_exceptions=True)

def test_example(self):
    loop = asyncio.new_event_loop()
    try:
        asyncio.set_event_loop(loop)
        htmls = loop.run_until_complete(self.sessions())
    finally:
        loop.close()
    print(htmls)
Why is this? It is as if loop.run_until_complete(self.sessions()) is not waiting for it to finish.
It is resolved. It needed self as the first parameter of the get() method :S
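For completeness, a sketch of the corrected signature, assuming the methods live on a test class (the class definition and its name are not shown in the question, so TestSessions is hypothetical):

import aiohttp

class TestSessions:                        # hypothetical class name
    async def get(
        self,                              # the missing first parameter
        session: aiohttp.ClientSession,
    ) -> dict:
        resp = await session.request('GET', url="https://www.google.com/")
        return await resp.json()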

How to use AsyncModbusTCPClient from pymodbus.client.asynchronous.tcp in a coroutine?

Based on the 'Async Asyncio Client Example' of PyModbus, I tried to initialise the client in a coroutine.
The example in run_with_already_running_loop() works fine, but initialising ModbusClient hangs without a timeout or error message when run in the coroutine async_read().
#!/usr/bin/env python
import asyncio
import logging
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient as ModbusClient
from pymodbus.client.asynchronous import schedulers
from threading import Thread
import time

# --------------------------------------------------------------------------- #
# configure the client logging
# --------------------------------------------------------------------------- #
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)

UNIT = 0x01
TCP_IP = '192.168.0.168'
ADDRESS = 40035
COUNTS = 16

async def start_async_test(client):
    rr = await client.read_holding_registers(ADDRESS, COUNTS, unit=UNIT)
    print(rr.registers)

def run_with_already_running_loop():
    """
    An already running loop is passed to ModbusClient Factory
    :return:
    """
    log.debug("Running Async client with asyncio loop already started")
    log.debug("------------------------------------------------------")

    def done(future):
        log.info("future: Done !!!")

    def start_loop(loop):
        """
        Start Loop
        :param loop:
        :return:
        """
        asyncio.set_event_loop(loop)
        loop.run_forever()

    loop = asyncio.new_event_loop()
    t = Thread(target=start_loop, args=[loop])
    t.daemon = True
    # Start the loop
    t.start()
    assert loop.is_running()
    loop, client = ModbusClient(schedulers.ASYNC_IO,
                                host=TCP_IP,
                                loop=loop)
    future = asyncio.run_coroutine_threadsafe(
        start_async_test(client.protocol), loop=loop)
    future.add_done_callback(done)
    while not future.done():
        print('sleep')
        time.sleep(0.2)
    loop.stop()
    log.debug("--------DONE RUN_WITH_ALREADY_RUNNING_LOOP-------------")

async def async_read():
    """
    An already running loop is passed to ModbusClient Factory
    :return:
    """
    log.debug("Running Async client in async function")
    log.debug("------------------------------------------------------")
    loop = asyncio.get_running_loop()
    assert loop.is_running()
    # python hangs when initialising client
    loop, client = ModbusClient(schedulers.ASYNC_IO,
                                host=TCP_IP,
                                loop=loop)
    future = asyncio.run_coroutine_threadsafe(
        start_async_test(client.protocol), loop=loop)
    log.debug("------- DONE IN ASYNC FUNCTION -------------")
    log.debug("")

if __name__ == '__main__':
    log.debug(
        "------------------- Run with already running loop -------------------")
    run_with_already_running_loop()
    print('new test'.center(90, '-'))
    asyncio.run(async_read())
The working sync code was
from pymodbus.client.sync import ModbusTcpClient
client = ModbusTcpClient(TCP_IP)
client.connect()
client.read_holding_registers(ADDRESS, count=COUNT)
Any idea how to have a similar simple solution with asyncio?
Here is my working example. Feel free to give me suggestions for improvement:
import asyncio
from threading import Thread
from pymodbus import constants
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient

def start_loop(loop):
    asyncio.set_event_loop(loop)
    loop.run_forever()

loop = asyncio.new_event_loop()
t = Thread(target=start_loop, args=[loop])
t.daemon = True
t.start()
assert loop.is_running()
asyncio.set_event_loop(loop)

constants.Defaults.UnitId = 0
loop, client = AsyncModbusTCPClient(schedulers.ASYNC_IO, host="192.168.178.32", port=502, loop=loop, timeout=20)
if not client.protocol:
    raise ConnectionError("Modbus Device is not available")

async def execute_read():
    async def read():
        return await client.protocol.read_holding_registers(0, 2)
    future = asyncio.run_coroutine_threadsafe(read(), loop=loop)
    while not future.done():
        await asyncio.sleep(0.1)
    return future.result()

# execute_read must be awaited from inside a coroutine; here it is driven by a
# second event loop in the main thread (the original snippet used a bare top-level await)
async def main():
    response = await execute_read()
    print(response.__dict__['registers'])

asyncio.run(main())
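As a side note, newer pymodbus releases (3.x) ship an awaitable TCP client that can be used directly inside a coroutine, without the scheduler factory or a helper thread. A rough sketch, assuming the pymodbus 3.x API (the exact signatures, e.g. the count keyword and whether close() is awaitable, should be checked against the installed version):

import asyncio
from pymodbus.client import AsyncModbusTcpClient

async def read_registers():
    client = AsyncModbusTcpClient("192.168.0.168", port=502)
    await client.connect()                          # connect inside the running loop
    rr = await client.read_holding_registers(0, count=2)
    client.close()                                  # may be a plain call or awaitable depending on version
    return rr.registers

print(asyncio.run(read_registers()))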

Python asyncio task list generation without executing the function

While working in asyncio, I'm trying to use a list comprehension to build my task list. The basic form of the function is as follows:
import asyncio
import urllib.request as req

@asyncio.coroutine
def coro(term):
    print(term)
    google = "https://www.google.com/search?q=" + term.replace(" ", "+") + "&num=100&start=0"
    request = req.Request(google, None, headers)
    # (some beautiful soup stuff)
My goal is to use a list of terms to create my task list:
terms = ["pie", "chicken", "things", "stuff"]

tasks = [
    coro("pie"),
    coro("chicken"),
    coro("things"),
    coro("stuff")]
My initial thought was:
loop = asyncio.get_event_loop()
tasks = [my_coroutine(term) for term in terms]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
This doesn't create the task list; it runs the function during the list comprehension. Is there a way to use a shortcut to create the task list without writing every task?
Your HTTP client does not support asyncio, so you will not get the expected results. Try this to see that .wait() does work as you expect:
import asyncio
import random

@asyncio.coroutine
def my_coroutine(term):
    print("start", term)
    yield from asyncio.sleep(random.uniform(1, 3))
    print("end", term)

terms = ["pie", "chicken", "things", "stuff"]

loop = asyncio.get_event_loop()
tasks = [my_coroutine(term) for term in terms]
print("Here we go!")
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
If you use asyncio.gather() you get one future encapsulating all your tasks, which can be easily cancelled with .cancel(), demonstrated here with Python 3.5+ async def/await syntax (but it works the same with @coroutine and yield from):
import asyncio
import random

async def my_coroutine(term):
    print("start", term)
    n = random.uniform(0.2, 1.5)
    await asyncio.sleep(n)
    print("end", term)
    return "Term {} slept for {:.2f} seconds".format(term, n)

async def stop_all():
    """Cancels all still running tasks after one second"""
    await asyncio.sleep(1)
    print("stopping")
    fut.cancel()
    return ":-)"

loop = asyncio.get_event_loop()
terms = ["pie", "chicken", "things", "stuff"]
tasks = (my_coroutine(term) for term in terms)
fut = asyncio.gather(stop_all(), *tasks, return_exceptions=True)
print("Here we go!")
loop.run_until_complete(fut)

for task_result in fut.result():
    if not isinstance(task_result, Exception):
        print("OK", task_result)
    else:
        print("Failed", task_result)
loop.close()
And finally, if you want to use an async HTTP client, try aiohttp. First install it with:
pip install aiohttp
then try this example, which uses asyncio.as_completed:
import asyncio
import aiohttp

async def fetch(session, url):
    print("Getting {}...".format(url))
    async with session.get(url) as resp:
        text = await resp.text()
        return "{}: Got {} bytes".format(url, len(text))

async def fetch_all():
    async with aiohttp.ClientSession() as session:
        tasks = [fetch(session, "http://httpbin.org/delay/{}".format(delay))
                 for delay in (1, 1, 2, 3, 3)]
        for task in asyncio.as_completed(tasks):
            print(await task)
    return "Done."

loop = asyncio.get_event_loop()
resp = loop.run_until_complete(fetch_all())
print(resp)
loop.close()
This works in Python 3.5 (using the new async/await syntax):
import asyncio

async def coro(term):
    for i in range(3):
        await asyncio.sleep(int(len(term)))  # just sleep
        print("cor1", i, term)

terms = ["pie", "chicken", "things", "stuff"]
tasks = [coro(term) for term in terms]

loop = asyncio.get_event_loop()
cors = asyncio.wait(tasks)
loop.run_until_complete(cors)
Shouldn't your version yield from req.Request(google, None, headers)? And (what library is that?) is this library even made for use with asyncio?
(Here is the same code with the Python <= 3.4 syntax; the missing parts are the same as above):
@asyncio.coroutine
def coro(term):
    for i in range(3):
        yield from asyncio.sleep(int(len(term)))  # just sleep
        print("cor1", i, term)
Create a queue of tasks and run the event loop:
def main():
    tasks = []
    while terms:
        tasks.append(coro(terms.pop()))  # wrap each remaining term in the question's coroutine
    responses = asyncio.gather(*tasks, return_exceptions=True)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(responses)
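On Python 3.7+, the same idea is usually written with asyncio.run and a comprehension over the terms. A short sketch, with a stub coro standing in for the real per-term work from the question:

import asyncio

async def coro(term):
    # stub for the real per-term work (request, parsing, etc.)
    await asyncio.sleep(0.1)
    print("done:", term)

async def main():
    terms = ["pie", "chicken", "things", "stuff"]
    # the comprehension only creates the tasks; they then run concurrently under gather
    tasks = [asyncio.create_task(coro(term)) for term in terms]
    await asyncio.gather(*tasks, return_exceptions=True)

asyncio.run(main())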
