I have an asynchronous API which I'm using to connect and send mail to an SMTP server. It has some setup and teardown, so it fits nicely into a context manager from Python 3's contextlib.
However, I don't know if it's possible to write one, because context managers and coroutines both use the generator syntax.
This might demonstrate the problem (it contains a mix of yield-based and async/await syntax to show the difference between async calls and yields to the context manager):
@contextmanager
async def smtp_connection():
    client = SMTPAsync()
    ...

    try:
        await client.connect(smtp_url, smtp_port)
        await client.starttls()
        await client.login(smtp_username, smtp_password)
        yield client
    finally:
        await client.quit()
Is this kind of thing currently possible in Python? And how would I use a with ... as statement if it is? If not, is there an alternative way I could achieve this, maybe using an old-style context manager?
Since Python 3.7, you can write:
from contextlib import asynccontextmanager

@asynccontextmanager
async def smtp_connection():
    client = SMTPAsync()
    ...

    try:
        await client.connect(smtp_url, smtp_port)
        await client.starttls()
        await client.login(smtp_username, smtp_password)
        yield client
    finally:
        await client.quit()
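You then use it like any other async context manager (a minimal usage sketch; sender, recipients, and message are placeholders, and sendmail is assumed to be whatever send method your SMTPAsync client exposes):

    async def send_test_mail():
        async with smtp_connection() as client:
            # client is the connected, logged-in SMTPAsync instance yielded above
            await client.sendmail(sender, recipients, message)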
Before 3.7, you can use the async_generator package for this. On 3.6, you can write:
# This import changed, everything else is the same
from async_generator import asynccontextmanager

@asynccontextmanager
async def smtp_connection():
    client = SMTPAsync()
    ...

    try:
        await client.connect(smtp_url, smtp_port)
        await client.starttls()
        await client.login(smtp_username, smtp_password)
        yield client
    finally:
        await client.quit()
And if you want to work all the way back to 3.5, you can write:
# This import changed again:
from async_generator import asynccontextmanager, async_generator, yield_

@asynccontextmanager
@async_generator         # <-- added this
async def smtp_connection():
    client = SMTPAsync()
    ...

    try:
        await client.connect(smtp_url, smtp_port)
        await client.starttls()
        await client.login(smtp_username, smtp_password)
        await yield_(client)  # <-- this line changed
    finally:
        await client.quit()
Thanks to @jonrsharpe I was able to make an async context manager.
Here's what mine ended up looking like, for anyone who wants some example code:
class SMTPConnection():
    def __init__(self, url, port, username, password):
        self.client = SMTPAsync()
        self.url = url
        self.port = port
        self.username = username
        self.password = password

    async def __aenter__(self):
        await self.client.connect(self.url, self.port)
        await self.client.starttls()
        await self.client.login(self.username, self.password)
        return self.client

    async def __aexit__(self, exc_type, exc, tb):
        await self.client.quit()
usage:
async with SMTPConnection(url, port, username, password) as client:
    await client.sendmail(...)
Feel free to point out if I've done anything stupid.
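One note on the __aexit__ above (general context-manager behavior, not specific to this class): it returns None, which is falsy, so any exception raised inside the async with block still propagates after quit() has been awaited. Return True only if you deliberately want to swallow the exception:

    async def __aexit__(self, exc_type, exc, tb):
        await self.client.quit()
        # returning None (falsy) re-raises exceptions from the block;
        # returning True would suppress them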
The asyncio_extras package has a nice solution for this:
import asyncio_extras

@asyncio_extras.async_contextmanager
async def smtp_connection():
    client = SMTPAsync()
    ...
For Python < 3.6, you'd also need the async_generator package and replace yield client with await yield_(client).
I found that you need to call obj.__aenter__(...) in the try and obj.__aexit__(...) in the finally. Perhaps you do too, if all you want is to abstract away an overly complicated object that holds resources.
e.g.
import asyncio

from contextlib import asynccontextmanager

from pycoq.common import CoqContext, LocalKernelConfig
from pycoq.serapi import CoqSerapi

from pdb import set_trace as st


@asynccontextmanager
async def get_coq_serapi(coq_ctxt: CoqContext) -> CoqSerapi:
    """
    Returns a CoqSerapi instance that is closed with a with statement.
    The CoqContext for the file is also returned, since it can be used to manipulate the coq file, e.g. to return
    the coq statements as in `for stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):`.

    example use:
    ```
    filenames = pycoq.opam.opam_strace_build(coq_package, coq_package_pin)
    filename: str
    for filename in filenames:
        with get_coq_serapi(filename) as coq, coq_ctxt:
            for stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):
    ```

    ref:
    - https://stackoverflow.com/questions/37433157/asynchronous-context-manager
    - https://stackoverflow.com/questions/3693771/understanding-the-python-with-statement-and-context-managers

    Details:
    Meant to replace (see Brando's pycoq tutorial):
    ```
    async with aiofile.AIOFile(filename, 'rb') as fin:
        coq_ctxt = pycoq.common.load_context(filename)
        cfg = opam.opam_serapi_cfg(coq_ctxt)
        logfname = pycoq.common.serapi_log_fname(os.path.join(coq_ctxt.pwd, coq_ctxt.target))
        async with pycoq.serapi.CoqSerapi(cfg, logfname=logfname) as coq:
    ```
    usually you then loop through the coq stmts, e.g.
    ```
    for stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):
    ```
    """
    try:
        import pycoq
        from pycoq import opam
        from pycoq.common import LocalKernelConfig
        import os

        # - note you can't return the coq_ctxt here, so don't create it, due to how context managers work, even if it's needed later for e.g. `stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):`
        # _coq_ctxt: CoqContext = pycoq.common.load_context(coq_filepath)
        # - not returned since it seems it's only needed to start the coq-serapi interface
        cfg: LocalKernelConfig = opam.opam_serapi_cfg(coq_ctxt)
        logfname = pycoq.common.serapi_log_fname(os.path.join(coq_ctxt.pwd, coq_ctxt.target))
        # - needed to be returned to talk to coq
        coq: CoqSerapi = pycoq.serapi.CoqSerapi(cfg, logfname=logfname)
        # - crucial, or coq._kernel is None and .execute won't work
        await coq.__aenter__()  # calls self.start(); this must be called explicitly before the yield
        yield coq
    except Exception as e:
        # fin.close()
        # coq.close()
        import traceback
        await coq.__aexit__(Exception, e, traceback.format_exc())
        # coq_ctxt is just a data class, so no need to close it, see: https://github.com/brando90/pycoq/blob/main/pycoq/common.py#L32
    finally:
        import traceback
        err_msg: str = 'Finally exception clause'
        exception_type, exception_value = Exception(err_msg), ValueError(err_msg)
        print(f'{traceback.format_exc()=}')
        await coq.__aexit__(exception_type, exception_value, traceback.format_exc())
        # coq_ctxt is just a data class, so no need to close it, see: https://github.com/brando90/pycoq/blob/main/pycoq/common.py#L32
# -
async def loop_through_files_original():
    ''' '''
    import os
    import aiofile

    import pycoq
    from pycoq import opam

    coq_package = 'lf'
    from pycoq.test.test_autoagent import with_prefix
    coq_package_pin = f"file://{with_prefix('lf')}"
    print(f'{coq_package=}')
    print(f'{coq_package_pin=}')
    print(f'{coq_package_pin=}')

    filenames: list[str] = pycoq.opam.opam_strace_build(coq_package, coq_package_pin)
    filename: str
    for filename in filenames:
        print(f'-> {filename=}')
        async with aiofile.AIOFile(filename, 'rb') as fin:
            coq_ctxt: CoqContext = pycoq.common.load_context(filename)
            cfg: LocalKernelConfig = opam.opam_serapi_cfg(coq_ctxt)
            logfname = pycoq.common.serapi_log_fname(os.path.join(coq_ctxt.pwd, coq_ctxt.target))
            async with pycoq.serapi.CoqSerapi(cfg, logfname=logfname) as coq:
                print(f'{coq._kernel=}')
                for stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):
                    print(f'--> {stmt=}')
                    _, _, coq_exc, _ = await coq.execute(stmt)
                    if coq_exc:
                        raise Exception(coq_exc)
async def loop_through_files():
    """
    to test, run on linux:
    ```
    python ~/pycoq/pycoq/utils.py
    python -m pdb -c continue ~/pycoq/pycoq/utils.py
    ```
    """
    import pycoq

    coq_package = 'lf'
    from pycoq.test.test_autoagent import with_prefix
    coq_package_pin = f"file://{with_prefix('lf')}"
    print(f'{coq_package=}')
    print(f'{coq_package_pin=}')
    print(f'{coq_package_pin=}')

    filenames: list[str] = pycoq.opam.opam_strace_build(coq_package, coq_package_pin)
    filename: str
    for filename in filenames:
        print(f'-> {filename=}')
        coq_ctxt: CoqContext = pycoq.common.load_context(filename)
        async with get_coq_serapi(coq_ctxt) as coq:
            print(f'{coq=}')
            print(f'{coq._kernel=}')
            stmt: str
            for stmt in pycoq.split.coq_stmts_of_context(coq_ctxt):
                print(f'--> {stmt=}')
                _, _, coq_exc, _ = await coq.execute(stmt)
                if coq_exc:
                    raise Exception(coq_exc)
if __name__ == '__main__':
    asyncio.run(loop_through_files_original())
    asyncio.run(loop_through_files())
    print('Done!\a\n')
see code: https://github.com/brando90/pycoq/blob/main/pycoq/utils.py
Related
I have a discord bot written in Python using the discord.py library, and I want to combine it with a basic self-written IRC client. My idea was to use the discord bot to control the IRC client (join and part channels) and run them both simultaneously.
discordbot.py:
import time
import configparser
import datetime as dt
import os
from typing import (
    Any,
    Optional,
    Dict,
    List
)

import discord
from discord.ext import commands

from irc import IRCSimpleClient

root_path = os.path.dirname(__file__)
config = configparser.ConfigParser()
config.read("config.cfg")


class Main(commands.Bot):
    def __init__(self) -> None:
        intents = discord.Intents.all()
        super().__init__(command_prefix=commands.when_mentioned_or('!'),
                         intents=intents)

    async def on_ready(self):
        pass


def watch_message():
    while True:
        msg = irc.get_response()
        if "PING" in msg:
            irc.respond_ping(msg)
            print(dt.datetime.strftime(dt.datetime.now(), "%H:%M") + " PONG")
        try:
            msg = msg.strip().split(":")
            print("[{}][{}]<{}> {}".format(
                dt.datetime.strftime(dt.datetime.now(), "%H:%M"),
                "#" + msg[1].split(" #")[1].strip(),
                msg[1].split("!")[0],
                msg[2].strip()))
        except IndexError:
            pass


bot = Main()


@bot.command(name="join")
async def test(ctx: commands.Context):
    irc.join_channel("test")


username = config["Auth"]["username"]
oauth = config["Auth"]["oauth"]

irc = IRCSimpleClient(username, oauth)
irc.connect()
irc.join_channel("lcs")
# watch_message()

token = config["discord"]["token"]
bot.run(token)
irc.py:
#!/usr/bin/env python
import socket
import time


class IRCSimpleClient():
    def __init__(self, nick, oauth):
        self.username = nick
        self.oauth = oauth
        self.server = "irc.chat.twitch.tv"
        self.port = 80
        self.is_connected = False  # set once the welcome message (376) is seen

    def connect(self):
        self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.conn.connect((self.server, self.port))
        self.conn.send(f"PASS oauth:{self.oauth}\r\n".encode("utf-8"))
        self.conn.send(f"NICK {self.username}\r\n".encode("utf-8"))
        while not self.is_connected:
            resp = self.get_response()
            print(resp.strip())
            if "376" in resp:
                self.is_connected = True
            if "PING" in resp:
                self.respond_ping(resp)

    def get_response(self):
        return self.conn.recv(1024).decode("utf-8", "ignore")

    def send_cmd(self, cmd, message):
        command = "{} {}\r\n".format(cmd, message).encode("utf-8")
        self.conn.send(command)

    def send_message_to_channel(self, channel, message):
        command = "PRIVMSG {}".format(channel)
        message = ":" + message
        self.send_cmd(command, message)

    def join_channel(self, channel: str):
        joined = False
        cmd = "JOIN"
        if not channel.startswith("#"):
            channel = "#" + channel
        self.send_cmd(cmd, channel)
        while not joined:
            resp = self.get_response()
            print(resp.strip())
            if "366" in resp:
                joined = True
            if "PING" in resp:
                self.respond_ping(resp)

    def part_channel(self, channel: str):
        cmd = "PART"
        if not channel.startswith("#"):
            channel = "#" + channel
        self.send_cmd(cmd, channel)

    def respond_ping(self, message):
        self.send_cmd("PONG", ":" + message.split(":")[1])
As far as I know, discord.py uses asyncio under the hood, so I wanted to use it as well, but since the IRC client blocks while waiting to receive new messages, I'm not sure how to run both at the same time.
I tried asyncio and threading, but the watch_message function always blocks the discord bot's run function from executing.
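For what it's worth, one common pattern for this situation (a sketch, assuming watch_message stays blocking) is to push the blocking loop into a worker thread from the running event loop with run_in_executor, and start the bot with its coroutine entry point bot.start() instead of the blocking bot.run():

    import asyncio

    async def run_all():
        loop = asyncio.get_running_loop()
        # run the blocking IRC read loop in a worker thread so it
        # cannot stall the discord event loop
        irc_task = loop.run_in_executor(None, watch_message)
        # bot.start() is the awaitable counterpart of bot.run()
        await asyncio.gather(bot.start(token), irc_task)

    asyncio.run(run_all())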
I am struggling to get both my websocket script and my serial I/O script running together in one program.
Just some basic info before I continue:
I am using a Windows PC (I have no access to a Linux PC)
This is the reason why I am using the AIOSerial library instead of pyserial-asyncio
I have no "super" experience with asyncio, so be kind please :)
Here is my "old" websocket script:
from aiohttp import web
import socketio
import aiohttp_cors
import asyncio
import random

# creates a new Async Socket IO Server
sio = socketio.AsyncServer()
# Creates a new aiohttp web application
app = web.Application()
sio.attach(app)

server_is_responding = "Message from the server:"
the_response = "Hello there!"


async def index(request):
    with open('index.html') as f:
        print("Somebody entered the server from the browser!")
        return web.Response(text=f.read(), content_type='text/html')


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)
    # return send_message_to_client()


@sio.on('sendTextToServer')
async def message(sid, data):
    print("message: ", data)
    if data == "hei":
        await sio.emit("ServerMessage", {"hehe"})
    if data == "lol":
        await sio.emit("ServerMessage", {"Message from server:": "hehe, funny right?.."})
    else:
        await sio.emit("ServerMessage", {"Message from server:": "Hello There!"})


# We bind our aiohttp endpoint to our app router
cors = aiohttp_cors.setup(app)
app.router.add_get('/', index)

# We kick off our server
if __name__ == '__main__':
    web.run_app(app)
And here is my serial I/O script (which works and reads the data) that I am trying to combine with some of the websocket functions above:
import asyncio
import websockets
import socketio
import aiohttp_cors
import logging

from AIOExtensions.AIOSerial import (AIOSerial, AIOSerialClosedException,
                                     AIOSerialErrorException, AIOSerialNotOpenException)

logging.basicConfig(level=logging.DEBUG)

sio = socketio.AsyncServer()


async def hello(websocket, path):
    name = await websocket.recv()
    print(f"< {name}")

    greeting = f"Hello {name}!"

    await websocket.send(greeting)
    print(f"> {greeting}")


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)


async def read_IO_serial():
    try:
        async with AIOSerial('COM8', baudrate=115200, line_mode=True) as aios:
            await asyncio.sleep(100)
            try:
                while True:
                    # read with timeout
                    rcvd = await asyncio.wait_for(aios.read(), timeout=1.0)
                    # print the data received
                    print(f"data received: {rcvd}")
                    if rcvd == b'RF initialized\n':
                        print("CATCHED THIS LINE!")
            except asyncio.TimeoutError:
                print("reception timed out ;-(")
    except AIOSerialNotOpenException:
        print("Unable to open the port!")
        print()
        print("Have you specified the right port number? COM7? COM8?")
    # port fatal error
    except AIOSerialErrorException:
        print("Port error!")
    # port already closed
    except AIOSerialClosedException:
        print("Serial port is closed!")


start_server = websockets.serve(hello, "http://192.168.1.6", 8080)

# sio.attach(start_server)  # HOW CAN I ATTACH THIS SO IT CAN BE USED WITH THE SIO FUNCTIONS BELOW?

if start_server:
    print("Server started!")

asyncio.run(read_IO_serial())

asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
As you can see in my first simple websocket script, I could use "sio.attach(app)", which made it possible to listen to events from the client, so I need a way of replacing this "app" in my current script.
Can someone please help me with this?
I solved it using asyncio.gather(); this is how I did it:
from aiohttp import web
import socketio
import aiohttp_cors
import asyncio
import random
import asyncio as aio
import logging
import sys

# creates a new Async Socket IO Server
sio = socketio.AsyncServer()
# Creates a new aiohttp web application
app = web.Application()

sio.attach(app)

server_is_responding = "Message from the server:"
the_response = "Hello there!"


async def index(request):
    with open('index.html') as f:
        print("Somebody entered the server from the browser!")
        return web.Response(text=f.read(), content_type='text/html')


@sio.event
async def join(sid, message):
    sio.enter_room(sid, message['room'])
    await sio.emit('my_response', {'data': 'Entered room: ' + message['room']}, room=sid)


@sio.on("android-device")
async def message(sid, data):
    print("message: ", data)


@sio.on("receiveMessageFromServer")
async def message(sid, data):
    print("message: ", data)
    # await asyncio.sleep(1 * random.random())
    return "OKKKK", 123


from AIOExtensions.AIOSerial import (AIOSerial, AIOSerialClosedException,
                                     AIOSerialErrorException, AIOSerialNotOpenException)

logging.basicConfig(level=logging.DEBUG)


async def read_IO_serial():
    try:
        async with AIOSerial('COM8', baudrate=115200, line_mode=True) as aios:
            # aios.sp.baudrate = 230400
            # aios.sp.baudrate = 115200
            # await aios.write(b"AT\r\n")
            # await aios.read()
            # await aios.close()
            await aio.sleep(100)
            try:
                while True:
                    # read with timeout
                    rcvd = await aio.wait_for(aios.read(), timeout=1.0)
                    # print the data received
                    print(f"data received: {rcvd}")
                    if rcvd == b'RF initialized\n':
                        print("CATCHED THIS LINE!")
            except aio.TimeoutError:
                print("reception timed out ;-(")
    except AIOSerialNotOpenException:
        print("Unable to open the port!")
        print()
        print("Have you specified the right port number? COM7? COM8?")
    # port fatal error
    except AIOSerialErrorException:
        print("Port error!")
    # port already closed
    except AIOSerialClosedException:
        print("Serial port is closed!")


async def on_startup(app):
    pass


cors = aiohttp_cors.setup(app)
app.router.add_get('/', index)

# We kick off our server
if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    group2 = asyncio.gather(read_IO_serial())
    group1 = asyncio.gather(web.run_app(app))
    all_groups = asyncio.gather(group1, group2)
    results = loop.run_until_complete(all_groups)
    # loop.close()
    # print(results)
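Note that web.run_app() is itself a blocking call that manages its own loop, so gathering it as above is fragile. aiohttp also provides the AppRunner API for serving the app as a coroutine inside an existing loop; a sketch of that variant (host and port are placeholders):

    async def start_all():
        runner = web.AppRunner(app)
        await runner.setup()
        site = web.TCPSite(runner, '0.0.0.0', 8080)
        await site.start()       # serves without blocking the event loop
        await read_IO_serial()   # serial reader shares the same loop

    asyncio.run(start_all())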
I am scraping blog URLs from a main page, and later I iterate over all the URLs to retrieve the text on them.
Will a generator be faster if I move the loop into blogscraper and make it yield some_text? I guess the app will still be single-threaded, and it won't request the next pages while computing the text from the HTML.
Should I use asyncio? Or are there some better modules to make it parallel?
I also want to later make a small REST app for displaying the results.
def readmainpage(self):
    blogurls = []
    while nextPage:
        r = requests.get(url)
        ...
        blogurls += [new_url]
    return blogurls

def blogscraper(self, url):
    r = requests.get(url)
    ...
    return sometext

def run(self):
    blog_list = self.readmainpage()
    for blog in blog_list:
        data = self.blogscraper(blog['url'])
Using the threading package, you can run your top function (the object initialization) asynchronously; it will create parallel sub-threads for your requests. For example, if fetching a single page takes 2 minutes and you have 10 pages, with threading all 10 will still take about 2 minutes. See Threading in Python 3.x.
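A minimal sketch of that idea using the standard library's thread pool (method names borrowed from the question, so treat it as a template rather than a drop-in):

    from concurrent.futures import ThreadPoolExecutor

    def run_threaded(self):
        blog_list = self.readmainpage()
        urls = [blog['url'] for blog in blog_list]
        # fetch up to 10 pages concurrently in worker threads
        with ThreadPoolExecutor(max_workers=10) as pool:
            texts = list(pool.map(self.blogscraper, urls))
        return texts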
With asyncio you can try the aiohttp module:
pip install aiohttp
As example code, it can look something like this; some improvements can still be made, but that depends on your code...
import sys
import aiohttp
import asyncio
import socket
from urllib.parse import urlparse


class YourClass:

    def __init__(self):
        self.url = "..."
        url_parsed = urlparse(self.url)
        self.session = aiohttp.ClientSession(
            headers={"Referer": f"{url_parsed.scheme}://{url_parsed.netloc}"},
            auto_decompress=True,
            connector=aiohttp.TCPConnector(family=socket.AF_INET, verify_ssl=False))

    async def featch(self, url):
        async with self.session.get(url) as resp:
            assert resp.status == 200
            return await resp.text()

    async def readmainpage(self):
        blogurls = []
        while nextPage:
            r = await self.featch(self.url)
            # ...
            blogurls += [new_url]
        return blogurls

    async def blogscraper(self, url):
        r = await self.featch(url)
        # ...
        return sometext

    async def __call__(self):
        url_parsed = urlparse(self.url)
        blog_list = await self.readmainpage()
        coros = [asyncio.Task(self.blogscraper(blog['url'])) for blog in blog_list]
        for data in await asyncio.gather(*coros):
            print(data)
        # do not forget to close the session if not using a with statement
        await self.session.close()


def main():
    featcher = YourClass()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(featcher())
    sys.exit(0)


if __name__ == "__main__":
    main()
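On Python 3.7+, the same entry point can be written with asyncio.run(), which creates and closes the loop for you (behavior otherwise identical to the main() above; depending on your aiohttp version, creating the ClientSession outside a coroutine may emit a warning):

    def main():
        featcher = YourClass()
        asyncio.run(featcher())

    if __name__ == "__main__":
        main()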
I'm using aiohttp with the limited_as_completed method to speed up scraping (around 100 million static website pages). However, the code stops after several minutes and returns a TimeoutError. I tried several things, but still could not prevent the raise asyncio.TimeoutError. May I ask how I can ignore the error and continue?
The code I'm running is:
N = 123
import html
from lxml import etree
import requests
import asyncio
import aiohttp
from aiohttp import ClientSession, TCPConnector
import pandas as pd
import re
import csv
import time
from itertools import islice
import sys
from contextlib import suppress

start = time.time()
data = {}
data['name'] = []
filename = "C:\\Users\\xxxx" + str(N) + ".csv"


def limited_as_completed(coros, limit):
    futures = [
        asyncio.ensure_future(c)
        for c in islice(coros, 0, limit)
    ]

    async def first_to_finish():
        while True:
            await asyncio.sleep(0)
            for f in futures:
                if f.done():
                    futures.remove(f)
                    try:
                        newf = next(coros)
                        futures.append(
                            asyncio.ensure_future(newf))
                    except StopIteration as e:
                        pass
                    return f.result()

    while len(futures) > 0:
        yield first_to_finish()


async def get_info_byid(i, url, session):
    async with session.get(url, timeout=20) as resp:
        print(url)
        with suppress(asyncio.TimeoutError):
            r = await resp.text()
            name = etree.HTML(r).xpath('//h2[starts-with(text(),"Customer Name")]/text()')
            data['name'].append(name)
            dataframe = pd.DataFrame(data)
            dataframe.to_csv(filename, index=False, sep='|')


limit = 1000


async def print_when_done(tasks):
    for res in limited_as_completed(tasks, limit):
        await res


url = "http://xxx.{}.html"
loop = asyncio.get_event_loop()


async def main():
    connector = TCPConnector(limit=10)
    async with ClientSession(connector=connector, headers=headers, raise_for_status=False) as session:
        coros = (get_info_byid(i, url.format(i), session) for i in range(N, N + 1000000))
        await print_when_done(coros)


loop.run_until_complete(main())
loop.close()
print("took", time.time() - start, "seconds.")
The error log is:
Traceback (most recent call last):
  File "C:\Users\xxx.py", line 111, in <module>
    loop.run_until_complete(main())
  File "C:\Users\xx\AppData\Local\Programs\Python\Python37-32\lib\asyncio\base_events.py", line 573, in run_until_complete
    return future.result()
  File "C:\Users\xxx.py", line 109, in main
    await print_when_done(coros)
  File "C:\Users\xxx.py", line 98, in print_when_done
    await res
  File "C:\Users\xxx.py", line 60, in first_to_finish
    return f.result()
  File "C:\Users\xxx.py", line 65, in get_info_byid
    async with session.get(url,timeout=20) as resp:
  File "C:\Users\xx\AppData\Local\Programs\Python\Python37-32\lib\site-packages\aiohttp\client.py", line 855, in __aenter__
    self._resp = await self._coro
  File "C:\Users\xx\AppData\Local\Programs\Python\Python37-32\lib\site-packages\aiohttp\client.py", line 391, in _request
    await resp.start(conn)
  File "C:\Users\xx\AppData\Local\Programs\Python\Python37-32\lib\site-packages\aiohttp\client_reqrep.py", line 770, in start
    self._continue = None
  File "C:\Users\xx\AppData\Local\Programs\Python\Python37-32\lib\site-packages\aiohttp\helpers.py", line 673, in __exit__
    raise asyncio.TimeoutError from None
concurrent.futures._base.TimeoutError
I have tried:
1) adding except asyncio.TimeoutError: pass. Not working.
async def get_info_byid(i, url, session):
    async with session.get(url, timeout=20) as resp:
        print(url)
        try:
            r = await resp.text()
            name = etree.HTML(r).xpath('//h2[starts-with(text(),"Customer Name")]/text()')
            data['name'].append(name)
            dataframe = pd.DataFrame(data)
            dataframe.to_csv(filename, index=False, sep='|')
        except asyncio.TimeoutError:
            pass
2) suppress(asyncio.TimeoutError), as shown above. Not working.
I just learned aiohttp yesterday, so maybe there are other things wrong in my code that cause the timeout error after only a few minutes of running? Thank you very much if anyone knows how to deal with it!
What @Yurii Kramarenko has done will raise an unclosed client session exception for sure, since the session is never properly closed. What I recommend is something like this:
import asyncio
import aiohttp

async def main(urls):
    async with aiohttp.ClientSession(timeout=self.timeout) as session:
        tasks = [self.do_something(session, url) for url in urls]
        await asyncio.gather(*tasks)
I like @jbxiaoyu's answer, but the timeout kwarg seems to take a special object, so I thought I'd add that you need to create a ClientTimeout object, then pass it to the Session, like this:
from aiohttp import ClientSession, ClientTimeout

timeout = ClientTimeout(total=600)
async with ClientSession(timeout=timeout) as session:
    tasks = [self.do_something(session, url) for url in urls]
    await asyncio.gather(*tasks)
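If you only need the longer timeout for some requests, a ClientTimeout can (as far as I know, in aiohttp 3.x) also be passed to an individual request instead of the whole session:

    # assumed aiohttp 3.x behavior: a per-request timeout overrides the session's
    async with session.get(url, timeout=ClientTimeout(total=30)) as resp:
        text = await resp.text()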
Simple example (not very good, but works fine):
import asyncio
from aiohttp.client import ClientSession


class Wrapper:

    def __init__(self, session):
        self._session = session

    async def get(self, url):
        try:
            async with self._session.get(url, timeout=20) as resp:
                return await resp.text()
        except Exception as e:
            print(e)


loop = asyncio.get_event_loop()
wrapper = Wrapper(ClientSession())

responses = loop.run_until_complete(
    asyncio.gather(
        wrapper.get('http://google.com'),
        wrapper.get('http://google.com'),
        wrapper.get('http://google.com'),
        wrapper.get('http://google.com'),
        wrapper.get('http://google.com')
    )
)

print(responses)
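As noted above, this example never closes its ClientSession; a variant that cleans up (same Wrapper class assumed) would be:

    async def main():
        async with ClientSession() as session:
            wrapper = Wrapper(session)
            responses = await asyncio.gather(
                *(wrapper.get('http://google.com') for _ in range(5))
            )
            print(responses)

    asyncio.run(main())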
I am writing a simple producer/consumer app to call multiple URLs asynchronously.
In the following code, if I set conn_count=1 and add 2 items to the Queue, it works fine, as only one consumer is created. But if I make conn_count=2 and add 4 items to the Queue, only 3 requests are made. The other request fails with ClientConnectorError.
Can you please help me debug the reason for the failure with multiple consumers? Thank you.
I am using an echo server I created.
Server:
import os
import logging.config

import yaml
from aiohttp import web
import json


def start():
    setup_logging()
    app = web.Application()
    app.router.add_get('/', do_get)
    app.router.add_post('/', do_post)
    web.run_app(app)


async def do_get(request):
    return web.Response(text='hello')


async def do_post(request):
    data = await request.json()
    return web.Response(text=json.dumps(data))


def setup_logging(
        default_path='logging.yaml',
        default_level=logging.INFO,
        env_key='LOG_CFG'
):
    path = default_path
    value = os.getenv(env_key, None)
    if value:
        path = value
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = yaml.safe_load(f.read())
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)


if __name__ == '__main__':
    start()
Client:
import asyncio
import collections
import json
import sys

import async_timeout
from aiohttp import ClientSession, TCPConnector

MAX_CONNECTIONS = 100
URL = 'http://localhost:8080'

InventoryAccount = collections.namedtuple("InventoryAccount", "op_co customer_id")


async def produce(queue, num_consumers):
    for i in range(num_consumers * 2):
        await queue.put(InventoryAccount(op_co=i, customer_id=i * 100))
    for j in range(num_consumers):
        await queue.put(None)


async def consumer(n, queue, session, responses):
    print('consumer {}: starting'.format(n))
    while True:
        try:
            account = await queue.get()
            if account is None:
                queue.task_done()
                break
            else:
                print(f"Consumer {n}, Updating cloud prices for account: opCo = {account.op_co!s}, customerId = {account.customer_id!s}")
                params = {'opCo': account.op_co, 'customerId': account.customer_id}
                headers = {'content-type': 'application/json'}
                with async_timeout.timeout(10):
                    print(f"Consumer {n}, session state " + str(session.closed))
                    async with session.post(URL,
                                            headers=headers,
                                            data=json.dumps(params)) as response:
                        assert response.status == 200
                        responses.append(await response.text())
                queue.task_done()
        except:
            e = sys.exc_info()[0]
            print(f"Consumer {n}, Error updating cloud prices for account: opCo = {account.op_co!s}, customerId = {account.customer_id!s}. {e}")
            queue.task_done()
    print('consumer {}: ending'.format(n))


async def start(loop, session, num_consumers):
    queue = asyncio.Queue(maxsize=num_consumers)
    responses = []
    consumers = [asyncio.ensure_future(loop=loop, coro_or_future=consumer(i, queue, session, responses)) for i in range(num_consumers)]
    await produce(queue, num_consumers)
    await queue.join()
    for consumer_future in consumers:
        consumer_future.cancel()
    return responses


async def run(loop, conn_count):
    async with ClientSession(loop=loop, connector=TCPConnector(verify_ssl=False, limit=conn_count)) as session:
        result = await start(loop, session, conn_count)
        print("Result: " + str(result))


if __name__ == '__main__':
    conn_count = 2
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(run(loop, conn_count))
    finally:
        loop.close()
Reference:
https://pymotw.com/3/asyncio/synchronization.html
https://pawelmhm.github.io/asyncio/python/aiohttp/2016/04/22/asyncio-aiohttp.html
https://hackernoon.com/asyncio-for-the-working-python-developer-5c468e6e2e8e