I'm getting the error AttributeError: module 'aiohttp' has no attribute 'ClientSession', but ClientSession definitely exists in the module. I don't know how to solve it; I've tried everything. Can someone help?
import aiohttp
import asyncio
import json
import time

async def get_page(session, url):
    async with session.get(url) as r:
        return await r.text()

async def get_all(session, urls):
    tasks = []
    for url in urls:
        task = asyncio.create_task(get_page(session, url))
        tasks.append(task)
    results = await asyncio.gather(*tasks)
    return results

async def main(urls):
    async with aiohttp.ClientSession() as session:  # Error here
        data = await get_all(session, urls)
        return data

def parse(results):
    for html in results:
        data = json.loads(html)
        return

if __name__ == '__main__':
    urls = ['https://www.google.com']
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    results = asyncio.run(main(urls))
    parse(results)
The problem is that you've named the script aiohttp.py, so Python imports your own file instead of the installed aiohttp package.
Rename the file to aiohttp_custom.py (or anything else) and the error should go away.
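If you want to confirm the shadowing before renaming, a quick check (just a diagnostic sketch, not part of the fix) is to print which file the name aiohttp actually resolves to:

import aiohttp

# With a local aiohttp.py in the working directory, this prints the path of
# your own script instead of the installed package in site-packages.
print(aiohttp.__file__)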
I am using aiohttp and asyncio to run multiple requests asynchronously. The problem is that when I try to print the data I receive, I end up getting the data from another request in the task queue. I have tried to debug this and looked through the docs, but I am unable to solve the problem.
Here's my code:
from time import sleep

import aiohttp
import asyncio

async def search(query, session):
    search_params = {
        "query": query
    }
    async with session.get(
        url,
        params=search_params,
    ) as response:
        json_response = await response.json()
        data = json_response["data"]
        print(data)
        """The line above always prints the data from the response of the first task to get
        executed, not the data for this request's different query."""

async def main():
    async with aiohttp.ClientSession() as session:
        await init_session(session)
        await enable_search(session)
        while True:
            tasks = [asyncio.create_task(search(query, session=session)) for query in inputs]
            await asyncio.gather(*tasks)
            sleep(5)

if __name__ == "__main__":
    asyncio.run(main())
async def get_quote():
    async with aiohttp.ClientSession() as session:
        async with session.get("https://zenquotes.io/api/random") as response:
            json_data = json.loads(response.text)
            quote = json_data[0]["q"]
            return quote

@client.command(aliases=["wow"])
async def inspire(ctx):
    quote = await get_quote()
    await ctx.send(quote)
I was told to use aiohttp instead of the requests module, so I did. But when I used aiohttp, it said module 'aiohttp' has no attribute 'get'. The code above is what I have right now. Please help.
response.text is a coroutine method, not an attribute, so it must be called and awaited. Try:
async def get_quote():
    async with aiohttp.ClientSession() as session:
        async with session.get("https://zenquotes.io/api/random") as response:
            json_data = json.loads(await response.text())
            quote = json_data[0]["q"]
            return quote
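As a side note, aiohttp can also decode the body for you, so json.loads is not strictly needed; a minimal sketch of the same function using response.json():

async def get_quote():
    async with aiohttp.ClientSession() as session:
        async with session.get("https://zenquotes.io/api/random") as response:
            # response.json() parses the body directly, no json.loads required
            json_data = await response.json()
            return json_data[0]["q"]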
I used requests.
import requests

@bot.command()
async def quote(ctx):
    response = requests.get('https://api.quotable.io/random')
    r = response.json()
    await ctx.send(r["content"])
This works fine for me.
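One thing to keep in mind with that approach: requests.get is a blocking call, so it stalls the bot's event loop for the duration of the request. A non-blocking sketch of the same command with aiohttp, assuming the same bot object and the quotable.io endpoint used above, could look like this:

import aiohttp

@bot.command()
async def quote(ctx):
    async with aiohttp.ClientSession() as session:
        async with session.get('https://api.quotable.io/random') as response:
            r = await response.json()
    await ctx.send(r["content"])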
Please install the aiohttp package and import it:
pip3 install aiohttp
import aiohttp
Then refer to the first example at docs.aiohttp.org.
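For reference, the client quickstart from the aiohttp docs follows the same pattern as the fix above: create a ClientSession, make the request inside it, and await the body. A minimal sketch (the URL here is just an example):

import asyncio
import aiohttp

async def main():
    async with aiohttp.ClientSession() as session:
        async with session.get('https://python.org') as response:
            print(response.status)
            print(await response.text())

asyncio.run(main())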
Install httpx:
pip3 install httpx
For a starter test, try this one:
import asyncio
import httpx

async def test():
    async with httpx.AsyncClient() as client:
        resp = await client.get('https://dog.ceo/api/breeds/image/random')
        print(resp)

asyncio.run(test())
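Printing resp only shows the response object's repr; if you want the actual payload, httpx exposes resp.json() and resp.text as well. A small sketch extending the test above (the dog.ceo endpoint returns a small JSON document):

import asyncio
import httpx

async def test():
    async with httpx.AsyncClient() as client:
        resp = await client.get('https://dog.ceo/api/breeds/image/random')
        print(resp.status_code)  # e.g. 200
        print(resp.json())       # parsed JSON body as a dict

asyncio.run(test())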
I found the package aiomultiprocess, which seems like it can do both multiprocessing and asyncio.
from aiohttp import request
from aiomultiprocess import Pool

async def get(url):
    async with request("GET", url) as response:
        return await response.text("utf-8")

async def main():
    urls = ["https://jreese.sh", "https://www.google.com"]
    async with Pool() as pool:
        async for result in pool.map(get, urls):
            print(result)
Running the sample code, though, does absolutely nothing, and calling main() directly gives me RuntimeWarning: coroutine 'main' was never awaited. I can't find an actual example of how to trigger the code.
The only other question about this isn't answered.
The aiomultiprocess documentation example does not show how to start the event loop. The main() coroutine needs to be run via asyncio:
import asyncio

from aiohttp import request
from aiomultiprocess import Pool

async def get(url):
    async with request("GET", url) as response:
        return await response.read()

async def main():
    urls = ["https://jreese.sh", "https://www.google.com"]
    async with Pool() as pool:
        async for result in pool.map(get, urls):
            print(result)

if __name__ == '__main__':
    # for Python 3.7+
    asyncio.run(main())

    # for Python 3.6
    # loop = asyncio.get_event_loop()
    # loop.run_until_complete(main())
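As a side note, the if __name__ == '__main__' guard is not optional here: aiomultiprocess spawns worker processes, and on platforms that use the spawn start method (Windows, and macOS on recent Python versions) each worker re-imports the module, so unguarded top-level code would run again in every child.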
import asyncio
import Response
import aiohttp

async def resolve_response_json(res):
    new_res = Response()
    async with res:
        new_res.status = res.status
        new_res.json = await res.json()
    return new_res

class Client:
    async def request(url):
        async with aiohttp.ClientSession() as sess:
            res = await sess.get(url=url)
            return await resolve_response_json(res).json

client = Client()
loop = asyncio.get_event_loop()
value = loop.run_until_complete(client.request('https://example.com/api/v1/resource'))
Why does this piece of code give me:
> return await resolve_response_json(res).json
E AttributeError: 'coroutine' object has no attribute 'json'
I thought that the await keyword always returns an actual value. If it actually does, why is my code throwing this error?
Or am I just silly and probably forgot to put an await somewhere?
You are awaiting resolve_response_json(res).json, not resolve_response_json(res). Attribute access binds more tightly than await, so .json is looked up on the coroutine object before it has been awaited, hence the AttributeError.
Changing it to (await resolve_response_json(res)).json should work.
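Put into context, the request method from the question could then look like this (a sketch that keeps the question's Response wrapper and also adds the missing self parameter):

class Client:
    async def request(self, url):
        async with aiohttp.ClientSession() as sess:
            res = await sess.get(url=url)
            # await the coroutine first, then read the attribute on its result
            return (await resolve_response_json(res)).json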
My code is as follows:
import asyncio
import aiohttp

urls = [
    'http://www.163.com/',
    'http://www.sina.com.cn/',
    'https://www.hupu.com/',
    'http://www.csdn.net/'
]

async def get_url_data(u):
    """
    read url data
    :param u:
    :return:
    """
    print('running ', u)
    resp = await aiohttp.ClientSession().get(url=u)
    headers = resp.headers
    print(u, headers)
    return headers

async def request_url(u):
    """
    main func
    :param u:
    :return:
    """
    res = await get_url_data(u)
    return res

loop = asyncio.get_event_loop()
task_lists = asyncio.wait([request_url(u) for u in urls])
loop.run_until_complete(task_lists)
loop.close()
When I run my code, it displays a warning message:
Unclosed client session
Can anybody give me a solution for this?
Thanks a lot.
You should close the session when you are done with it.
You have two options:
You can close the session manually:
import aiohttp
session = aiohttp.ClientSession()
# use the session here
session.close()
Or you can use it with a context manager:
import aiohttp
import asyncio

async def fetch(client):
    async with client.get('http://python.org') as resp:
        assert resp.status == 200
        return await resp.text()

async def main(loop):
    async with aiohttp.ClientSession(loop=loop) as client:
        html = await fetch(client)
        print(html)

loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))
The client session supports the context manager protocol for self closing.
If you are not using the context manager, the proper way to close the session also needs an await. Many answers on the internet miss that part, and few people notice it, presumably because most people use the more convenient context manager. But the manual await session.close() is essential when you are closing a class-wide session inside tearDownClass() while unit testing.
import aiohttp
session = aiohttp.ClientSession()
# use the session here
await session.close()
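For that unit-testing case, one way to get an await into the teardown is unittest.IsolatedAsyncioTestCase (Python 3.8+), which supports async setup and teardown methods. A minimal sketch, using a per-test session and a placeholder URL:

import unittest
import aiohttp

class ApiTests(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self):
        self.session = aiohttp.ClientSession()

    async def asyncTearDown(self):
        # the close must be awaited, otherwise the
        # "Unclosed client session" warning comes back
        await self.session.close()

    async def test_fetch(self):
        async with self.session.get('http://python.org') as resp:
            self.assertEqual(resp.status, 200)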
You should use ClientSession with an async context manager so that its resources are properly acquired and released:
async def get_url_data(u):
    """
    read url data
    :param u:
    :return:
    """
    print('running ', u)
    async with aiohttp.ClientSession() as session:
        resp = await session.get(url=u)
        headers = resp.headers
        print(u, headers)
        return headers
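To tie it back to the original script, the rewritten coroutine can be driven with asyncio.gather and asyncio.run on Python 3.7+. A short sketch along those lines, using two of the URLs from the question:

import asyncio
import aiohttp

async def get_url_data(u):
    async with aiohttp.ClientSession() as session:
        async with session.get(u) as resp:
            return resp.headers

async def main(urls):
    # gather runs the requests concurrently and returns results in input order
    return await asyncio.gather(*(get_url_data(u) for u in urls))

if __name__ == '__main__':
    urls = ['http://www.163.com/', 'http://www.sina.com.cn/']
    for headers in asyncio.run(main(urls)):
        print(headers)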