Aiohttp adding values to string in discord.py - python

I am trying to add the user's response into the URL. My code is
# BUG (the subject of this question): dungeondata() takes no parameters,
# yet `name` and `cutename` are used in the URL below and are passed by the
# caller at the bottom — as written this raises NameError here and
# TypeError at the call site. The answer that follows adds them as
# parameters.
async def dungeondata():
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
async with session.get('https://sky.shiiyu.moe/api/v2/dungeons/{}/{}'.format(name, cutename)) as resp:
return await resp.json()
# NOTE(review): the leading '#' on the next line is presumably a garbled
# '@' — in discord.py this is the command decorator @bot.command(...).
#bot.command(name='dungeon', aliases=['dungeons'])
async def dungeon(ctx, name, cutename):
JSONData = await dungeondata(name, cutename)
When the user does `?dungeons <name> <cutename>`, I am trying to add the name and cutename to the URL so the URL becomes https://sky.shiiyu.moe/api/v2/dungeons/name/cutename. How do I do that?

Add name and cutename as arguments to the dungeon function.
async def dungeon(name, cutename):
    """Return the JSON dungeon stats for a player from the sky.shiiyu.moe API.

    Both arguments are substituted into the URL path, so the request goes to
    .../api/v2/dungeons/<name>/<cutename>, exactly as the question asked.
    """
    url = 'https://sky.shiiyu.moe/api/v2/dungeons/{}/{}'.format(name, cutename)
    # SSL verification is disabled here, matching the question's original setup.
    connector = aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.json()

Related

Why playwright isn't modifying request body?

I need to modify the request body that is sent by the browser, i do it like this:
async def handle_uri_route(route: Route):
# Fetch the real response for this route so the confirm URI can be read
# out of its JSON body.
response = await route.fetch()
response_json = await response.json()
reg_notification_uri = response_json['confirmUri']
# Register a second handler for the URI just extracted.
# NOTE(review): `page` is an outer-scope Playwright Page — it is not
# defined in this snippet.
await page.route(reg_notification_uri, modify_notification_uri)
await route.continue_()
async def modify_notification_uri(route: Route):
post_data = route.request.post_data_json
post_data['notificationUi'] = 'https://httpbin.org/anything'
await route.continue_(post_data=urlencode(post_data))
# NOTE(review): the question reports DevTools still shows the original
# body even though this pprint shows the change — whether continue_()'s
# override reaches the wire here should be verified against Playwright's
# Route.continue_/Route.fallback documentation.
pprint(route.request.post_data_json)
await page.route(re.compile(r'api\/common\/v1\/register_notification_uri\?'), handle_uri_route)
pprint displays the changed data, but if i open the devtools, then i see that the request hasn't really changed.
What am I doing wrong?

trying to run a get and a post with aiohttp

This is my code. I am trying to do 2 requests — a GET and a POST — with async, and I get this error now:
async with session.post(
AttributeError: 'str' object has no attribute 'post'
any idea how i can run 2 requests using async and parsing the data?
async def main():
async with aiohttp.ClientSession() as session:
async with session.get(
'https://login.yahoo.com',
) as login:
crumb = (await login.text()).partition('crumb" value="')[-1].partition('"')[0]
acrumb = (await login.text()).partition('name="acrumb" value="')[-1].partition('" />')[0]
# BUG (the cause of the AttributeError quoted above): the next line
# rebinds the name `session` — previously the aiohttp.ClientSession —
# to a plain str, so the later `session.post(...)` fails with
# "'str' object has no attribute 'post'". Rename this variable
# (e.g. session_index), as the answer below suggests.
session = (await login.text()).partition('sessionIndex" value="')[-1].partition('"')[0]
print(acrumb)
print(session)
print(crumb)
async with session.post(
'SECONDURL',
) as check:
print(await check.text())
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
The issue is this line:
session = (await login.text()).partition('sessionIndex" value="')[-1].partition('"')[0]
You're overwriting your session variable with this variable also named session. I would change the variable name in the above line to something like:
session_index = (await login.text()).partition('sessionIndex" value="')[-1].partition('"')[0]
async def main():
# you create a variable named session here
async with aiohttp.ClientSession() as session:
...
# you're mistakenly overwriting the session variable here
session = (await login.text()).partition('sessionIndex" value="')[-1].partition('"')[0]
...

Chaining Requests using Python and Aiohttp

I am new to asynchronous programming in python, have been working on a script using aiohttp that fetches data from a get request and passes a specific variable from the response onto another post request. A sample of what I have tried is below:
async def fetch1(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp: # First hit to the url
# BUG (the cause of the traceback below): resp.json() is a coroutine
# and must be awaited — as written, `data` is a coroutine object, so
# data['uuid'] raises "TypeError: 'coroutine' object is not
# subscriptable". Should be: data = await resp.json()
data = resp.json() # Grab response
return await fetch2(data['uuid']) # Pass uuid to the second function for post request
async def fetch2(id):
url2 = "http://httpbin.org/post"
params = {'id': id}
async with aiohttp.ClientSession() as session:
async with session.post(url2,data=params) as resp:
return await resp.json()
async def main():
url = 'http://httpbin.org/uuid'
data = await fetch1(url)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
When I execute the script, I get the following error:
Traceback (most recent call last):
File ".\benchmark.py", line 27, in <module>
loop.run_until_complete(main())
File "C:\ProgramFiles\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2288.0_x64__qbz5n2kfra8p0\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File ".\benchmark.py", line 22, in main
data = await fetch1(url)
File ".\benchmark.py", line 10, in fetch1
return fetch2(data['uuid'])
TypeError: 'coroutine' object is not subscriptable
sys:1: RuntimeWarning: coroutine 'ClientResponse.json' was never awaited
I know that the coroutine is a generator, but how do I go ahead, any help will be appreciated.
The error says coroutine 'ClientResponse.json' was never awaited which means it must have an await before the json part. This is because you are using an async function.
async def fetch1(url):
    """GET `url` and chain the returned uuid into a POST via fetch2."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # The fix: .json() is a coroutine, so it must be awaited.
            data = await resp.json()
            # Hand the uuid from the first response to the second request.
            return await fetch2(data['uuid'])


async def fetch2(id):
    """POST the given id to httpbin and return the decoded JSON response."""
    url2 = "http://httpbin.org/post"
    params = {'id': id}
    async with aiohttp.ClientSession() as session:
        async with session.post(url2, data=params) as resp:
            return await resp.json()


async def main():
    url = 'http://httpbin.org/uuid'
    data = await fetch1(url)


loop = asyncio.get_event_loop()
loop.run_until_complete(main())

Async processing of function requests using asyncio

I am trying to achieve aiohttp async processing of requests that have been defined in my class as follows:
class Async():
async def get_service_1(self, zip_code, session):
url = SERVICE1_ENDPOINT.format(zip_code)
response = await session.request('GET', url)
# BUG (the cause of the TypeError below): after the await above,
# `response` is already a ClientResponse — it cannot be awaited again.
# Await a payload coroutine instead, e.g. response.text(), as the
# answer that follows explains.
return await response
async def get_service_2(self, zip_code, session):
url = SERVICE2_ENDPOINT.format(zip_code)
response = await session.request('GET', url)
# Same bug as get_service_1: a ClientResponse is not awaitable.
return await response
async def gather(self, zip_code):
# Run both service calls concurrently over one shared session.
async with aiohttp.ClientSession() as session:
return await asyncio.gather(
self.get_service_1(zip_code, session),
self.get_service_2(zip_code, session)
)
def get_async_requests(self, zip_code):
# Synchronous entry point: creates a fresh event loop per call and
# closes it when the gather completes.
asyncio.set_event_loop(asyncio.SelectorEventLoop())
loop = asyncio.get_event_loop()
results = loop.run_until_complete(self.gather(zip_code))
loop.close()
return results
When running to get the results from the get_async_requests function, i am getting the following error:
TypeError: object ClientResponse can't be used in 'await' expression
Where am i going wrong in the code? Thank you in advance
When you await something like session.request, the I/O starts, but aiohttp returns when it receives the headers; it doesn't wait for the response to finish. (This would let you react to a status code without waiting for the entire body of the response.)
You need to await something that does that. If you're expecting a response that contains text, that would be response.text. If you're expecting JSON, that's response.json. This would look something like
response = await session.get(url)
return await response.text()

python aiohttp performance: connect performed on the main thread

I have the following code
import asyncio
import aiohttp
urls = [
'http://54.224.27.241',
'http://54.224.27.241',
'http://54.224.27.241',
'http://54.224.27.241',
'http://54.224.27.241',
]
async def query(urls):
out = []
# NOTE(review): ClientSession is an async context manager — per the
# answer below this should be `async with aiohttp.ClientSession() ...`.
# Also note the requests here run sequentially: each is awaited before
# the next starts, so total time grows linearly with the number of URLs
# (this matches the slowdown the question describes).
with aiohttp.ClientSession() as session:
for url in urls:
try:
async with session.get(url, timeout=5) as resp:
text = await resp.text()
out.append(resp.status)
# NOTE(review): bare except swallows every error (not only timeouts)
# and labels it 'timeout' — worth narrowing to asyncio.TimeoutError.
except:
print('timeout')
return out
loop = asyncio.get_event_loop()
out = loop.run_until_complete(query(urls))
loop.close()
print(str(out))
The code is much slower than the one that uses a thread pool, and the runtime keeps increasing if you increase the number of URLs (let's say 20, 50, etc.).
I have a feeling that the initial connection establishment is not done in an async way.
(Note that I am connecting here to an non-existing server to deliberately produce a connection timeout).
Can someone point out what is wrong here?
Warning: I don't promise this code works, as I can't install aiohttp atm, but looking at the example in the docs
async def fetch(session, url):
    """GET `url` under a 10-second overall timeout and return the body text."""
    async with async_timeout.timeout(10):
        async with session.get(url) as response:
            return await response.text()


async def main():
    # The session is entered with `async with`, exactly as the docs show.
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, 'http://python.org')
        print(html)


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
Notice how they're calling the aiohttp.ClientSession() with the async keyword. Additionally, I was getting some error in your line data = await async with session.get(url) as resp:
async def fetch(session, url):
    """Return the text body of a GET to `url` using the shared session."""
    async with session.get(url) as response:
        return await response.text()


async def main():
    # Collect each page body in order; `urls` comes from the enclosing script.
    out = []
    async with aiohttp.ClientSession() as session:
        for url in urls:
            data = await fetch(session, url)
            out.append(data)
        return out


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

Categories