I am writing a Telegram bot with aiogram that gives me information about my market.csgo.com accounts. The idea of the script is simple: I click a button, it displays the text, and the function is run.
My functions send async requests and work fine, but I don't know how to get aiohttp and aiogram to work together.
from aiogram import Bot, types
from aiogram.dispatcher import Dispatcher
from aiogram.utils import executor
from auth import *
import asyncio
import aiohttp
bot = Bot(token=token)
dp = Dispatcher(bot)
def users():
***Data of my accounts from txt to dict***
async def get_info(session, dictt, message):
total_wallet = 0
async with session.get(f'https://market.csgo.com/api/v2/get-money?key={dictt[1][1]}') as resp:
html = await resp.json()
total_wallet += int(html['money'])
#await bot.send_message(message.from_user.id, f'{total_wallet}')
async def get_on_sale(session, dictt, message):
sale_total_sum = 0
async with session.get(f'https://market.csgo.com/api/v2/items?key={dictt[1][1]}') as resp:
html = await resp.json()
for i in html['items']:
sale_total_sum += i['price']
#await bot.send_message(message.from_user.id, f'{sale_total_sum}')
#dp.message_handler(content_types=['text'])
async def Main():
try:
profiles = users()
async with aiohttp.ClientSession(trust_env=True) as session:
tasks = []
if message.text == 'info 📊':
await bot.send_message(message.from_user.id, 'Wait for information..')
for i in profiles.items():
task = asyncio.ensure_future(get_info(session, i))
tasks.append(task)
await asyncio.gather(*tasks)
if message.text == 'on sale 💰':
await bot.send_message(message.from_user.id, 'Wait for information..')
for i in profiles.items():
task = asyncio.ensure_future(get_on_sale(session, i))
tasks.append(task)
await asyncio.gather(*tasks)
except Exception as ex:
print(f'Error {ex}')
loop = asyncio.get_event_loop()
loop.run_until_complete(Main())
executor.start_polling(dp, skip_updates=True)
My problem is that I don't know how to properly pass the message argument to the Main function
#dp.message_handler(content_types=['text'])
async def Main(): #async def Main(message)
And run aiogram along with aiohttp.
loop.run_until_complete(Main()) #loop.run_until_complete(Main(message))
If I write `async def Main(message)` and `loop.run_until_complete(Main(message))`, then I get this error:
loop.run_until_complete(Main(message))
NameError: name 'message' is not defined
or if I use only async def Main(message) get this:
loop.run_until_complete(Main())
TypeError: Main() missing 1 required positional argument: 'message'
Solution:
async def loop_on(message):
loop = asyncio.get_event_loop()
loop.run_until_complete(Main(message))
There is the following code:
import asyncio
import aiohttp
aut_token = ("token")
tasks = []
iter_flag = False
class WAPI:
async def receiver(WAPI_S):
async for msg in WAPI_S:
data = msg.json()
raise aiohttp.ClientError #test
async def heartbeating(WAPI_S):
while iter_flag:
await WAPI_S.send_json({
"op": 1,
"d": None
})
await asyncio.sleep(42.5)
async def event_manager():
loop = asyncio.get_running_loop()
try:
async with aiohttp.ClientSession().ws_connect("url") as WAPI_S:
task_receive = loop.create_task(WAPI.receiver(WAPI_S)); task_heartbeating = loop.create_task(WAPI.heartbeating(WAPI_S))
tasks.append(task_receive); tasks.append(task_heartbeating)
await asyncio.gather(*tasks)
except aiohttp.ClientError:
global iter_flag
iter_flag = False
await asyncio.sleep(44)
[task.cancel() for task in tasks]
try:
loop.close()
except:
loop.stop()
asyncio.run(WAPI.event_manager())
I want to shut down the client correctly when the exception is raised. My implementation throws a "RuntimeError: Event loop stopped before Future completed" exception while executing. How do I do it right?
In method event_manager, the statement:
async with aiohttp.ClientSession().ws_connect("url") as WAPI_S:
needs to be replaced with:
async with aiohttp.ClientSession() as session:
async with session.ws_connect("url") as WAPI_S:
Also, it is considered anti-Pythonic to use a list comprehension for its side effects. See Is it Pythonic to use list comprehensions for just side effects? So you really should replace:
[task.cancel() for task in tasks]
with:
for task in tasks:
task.cancel()
Putting this all together:
async def event_manager():
loop = asyncio.get_running_loop()
try:
async with aiohttp.ClientSession() as session:
async with session.ws_connect("url") as WAPI_S:
task_receive = loop.create_task(WAPI.receiver(WAPI_S)); task_heartbeating = loop.create_task(WAPI.heartbeating(WAPI_S))
tasks.append(task_receive); tasks.append(task_heartbeating)
await asyncio.gather(*tasks)
except aiohttp.ClientError:
global iter_flag
iter_flag = False
await asyncio.sleep(44)
for task in tasks:
task.cancel()
try:
loop.close()
except:
loop.stop()
I got this function:
async def download(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
if resp.status == 200:
return resp
To get the data, I'd need to write:
text = await (await utils.download(url)).text()
How to change download so I can write like this
text = await utils.download(url).text()
without getting AttributeError: 'coroutine' object has no attribute 'text'?
May be this can help simplifying the usage:
async def download(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
if resp.status == 200:
return await resp.text()
Then to use it:
async def main():
text = await download("http://python.org")
My understanding is that an "async def" needs to be run on an event loop, e.g.
loop = asyncio.get_event_loop()
loop.run_until_complete(<method>)
I created some code below without the loop. It still supports 100 async calls without an issue. Did I miss anything?
Dummy server sleeps 5 seconds.
from aiohttp import web
import asyncio
import time
async def hello(request):
#time.sleep(1)
await asyncio.sleep(5)
return web.Response(text='dummy done')
app = web.Application()
app.add_routes([web.get('/', hello)])
web.run_app(app,host='127.0.0.1', port=8081)
Actual server taking requests.
import json
from aiohttp import web
import aiohttp
import asyncio
n = 0
def mcowA(n):
print (n, " : A")
return
async def fetch(session, url):
async with getattr(session,"get")(url) as response:
return await response.text()
def mcowB(n):
print (n, " : B")
return
async def runMcows(request):
global n
n = n + 1
mcowA(n)
async with aiohttp.ClientSession() as session:
html = await fetch(session, 'http://localhost:8081')
print(n,html)
mcowB(n)
return web.Response(text=html)
try:
app = web.Application()
app.add_routes([web.get('/', runMcows)])
#loop = asyncio.get_event_loop(web.run_app(app))
#loop.run_forever()
web.run_app(app)
finally:
loop.close()
I'm trying to asynchronously scrape data from a leaderboard for a video game. There are weekly and daily challenges. I've based my code so far on this async client with semaphores. The difference is I'm trying to contain the end, where the loop is used, in a function. Here's the relevant portion of my code:
from urllib.parse import urljoin
import asyncio
import aiohttp
async def fetch(url, session):
async with session.get(url) as response:
return await response.read()
async def bound_fetch(url, session, sem):
async with sem:
await fetch(url, session)
async def fetch_pages(url,pages,session):
tasks = []
sem = asyncio.Semaphore(LIMIT)
for page in range(pages+1):
task_url = urljoin(url,str(page))
task = asyncio.ensure_future(bound_fetch(task_url, session, sem))
tasks.append(task)
await asyncio.gather(*tasks)
def leaderboard_crawler(date, entries=0, pages=1):
website = "https://www.thronebutt.com/archive/"
date_url = urljoin(website,date+"/")
entries_per_page = 30
number_of_entries = entries or pages * entries_per_page
full_pages, last_page = divmod(number_of_entries,30)
entry_list = [30 for x in range(full_pages)]
if last_page != 0:
entry_list.append(last_page)
loop = asyncio.get_event_loop()
with aiohttp.ClientSession() as session:
future = asyncio.ensure_future(fetch_pages(date_url,pages,session))
date_html = loop.run_until_complete(future)
return date_html
def weekly_leaderboard(week, year, entries=0, pages=1):
weekly_date = "{0:02d}{1}".format(week, year)
return leaderboard_crawler(weekly_date,entries,pages)
def daily_leaderboard(day, month, year, entries=0, pages=1):
daily_date = "{0:02d}{1:02d}{2}".format(day, month, year)
return leaderboard_crawler(daily_date, entries, pages)
I think the problem is in the asyncio.gather(*tasks) portion of the fetch_pages function. I can't figure out how to pass that to the leaderboard_crawler. Right now date_html is None. I've tried return await asyncio.gather(*tasks), which returns an array of Nones. I've also tried wrapping it in asyncio.ensure_future and then passing it to loop.run_until_complete, but that doesn't seem to work either.
The reason is simple, you are missing return in your call stack:
async def bound_fetch(url, session, sem):
async with sem:
# await fetch(url, session) # missing return
return await fetch(url, session) # this one is right
async def fetch_pages(url,pages,session):
tasks = []
sem = asyncio.Semaphore(LIMIT)
for page in range(pages+1):
task_url = urljoin(url,str(page))
task = asyncio.ensure_future(bound_fetch(task_url, session, sem))
tasks.append(task)
# await asyncio.gather(*tasks) # missing return
return await asyncio.gather(*tasks) # this one is right.
The working example is here:
from urllib.parse import urljoin
import asyncio
import aiohttp
async def fetch(url, session):
async with session.get(url) as response:
return await response.read()
async def bound_fetch(url, session, sem):
async with sem:
return await fetch(url, session)
async def fetch_pages(url,pages,session):
tasks = []
sem = asyncio.Semaphore(5)
for page in range(pages+1):
task_url = urljoin(url,str(page))
task = asyncio.ensure_future(bound_fetch(task_url, session, sem))
tasks.append(task)
return await asyncio.gather(*tasks)
def leaderboard_crawler(date, entries=0, pages=1):
website = "https://www.thronebutt.com/archive/"
date_url = urljoin(website,date+"/")
entries_per_page = 30
number_of_entries = entries or pages * entries_per_page
full_pages, last_page = divmod(number_of_entries,30)
entry_list = [30 for x in range(full_pages)]
if last_page != 0:
entry_list.append(last_page)
loop = asyncio.get_event_loop()
with aiohttp.ClientSession() as session:
future = asyncio.ensure_future(fetch_pages(date_url,pages,session))
date_html = loop.run_until_complete(future)
return date_html
def weekly_leaderboard(week, year, entries=0, pages=1):
weekly_date = "{0:02d}{1}".format(week, year)
return leaderboard_crawler(weekly_date,entries,pages)
def daily_leaderboard(day, month, year, entries=0, pages=1):
daily_date = "{0:02d}{1:02d}{2}".format(day, month, year)
return leaderboard_crawler(daily_date, entries, pages)